Compare commits

8 Commits

Author SHA1 Message Date
Yuan Teoh
f6fef90359 chore: update docs and root test 2026-01-22 22:06:56 -08:00
Yuan Teoh
87c5953b75 chore: update yaml for tests 2026-01-22 17:18:57 -08:00
Yuan Teoh
7f0c49a4df chore: update yaml tag for tools 2026-01-22 17:11:12 -08:00
Yuan Teoh
ad8df40791 chore: update yaml tag for auth, embedding model, prompts, sources 2026-01-21 22:49:22 -08:00
Yuan Teoh
c29355ff82 chore: update unmarshal function for ToolsFile 2026-01-21 22:49:06 -08:00
Yuan Teoh
70f5550910 Update cmd/root_test.go
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-01-20 11:25:03 -08:00
Yuan Teoh
348c9fde08 chore: add preprocessing function to convert tools file 2026-01-20 11:25:03 -08:00
Yuan Teoh
aef539bcf3 refactor!: update Kind field to Type in source code (#2312)
Updates `Kind` to `Type` in the source code. These are internal-only changes;
changes to the yaml tag (which will affect users) will be done in later PRs.

This is a breaking change since it updates telemetry's span attribute
from `source_kind` to `source_type`.

Related #817

Future updates will include (see the sketch below):
* Adding a preprocessing function to convert the config file from v1 to v2
* Updating the unmarshal function for ToolsFile to convert the config file
(tests will fail since the yaml tag is not yet updated)
* Updating the yaml tag (tests will pass)
2026-01-20 11:20:41 -08:00
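
For context, a minimal sketch of what the v1 → v2 conversion implies. The names below are taken from the test fixtures in this change set, and the v2 shape follows the `convertToolsFile` logic in the `cmd/root.go` diff further down, which emits one YAML document per entry, renames nested `kind` to `type`, and wraps toolset lists in a `tools` key:

```yaml
# v1 config: resources nested under top-level maps, with `kind` inside each entry
sources:
  my-sqlite:
    kind: sqlite
    database: test.db
tools:
  echo-tool:
    kind: sqlite-sql
    source: my-sqlite
    description: "echo tool"
    statement: "SELECT ? as msg"
toolsets:
  my-set:
    - echo-tool
```

```yaml
# v2 equivalent: one document per resource; the top-level map key becomes
# `kind`, the entry name becomes `name`, and the old `kind` becomes `type`
kind: sources
name: my-sqlite
type: sqlite
database: test.db
---
kind: tools
name: echo-tool
type: sqlite-sql
source: my-sqlite
description: "echo tool"
statement: "SELECT ? as msg"
---
kind: toolsets
name: my-set
tools:
  - echo-tool
```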
365 changed files with 2136 additions and 10661 deletions

View File

@@ -87,7 +87,7 @@ steps:
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -134,7 +134,7 @@ steps:
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
- "ALLOYDB_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -171,23 +171,6 @@ steps:
alloydbainl \
alloydbainl
- id: "alloydb-omni"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB Omni" \
alloydbomni \
postgres
- id: "bigtable"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -310,26 +293,7 @@ steps:
.ci/test_with_coverage.sh \
"Cloud Healthcare API" \
cloudhealthcare \
cloudhealthcare
- id: "cloud-logging-admin"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "LOGADMIN_PROJECT=$PROJECT_ID"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud Logging Admin" \
cloudloggingadmin \
cloudloggingadmin
cloudhealthcare || echo "Integration tests failed."
- id: "postgres"
name: golang:1
@@ -341,7 +305,7 @@ steps:
- "POSTGRES_HOST=$_POSTGRES_HOST"
- "POSTGRES_PORT=$_POSTGRES_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -693,7 +657,7 @@ steps:
"Looker" \
looker \
looker
- id: "mindsdb"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -881,7 +845,7 @@ steps:
"Snowflake" \
snowflake \
snowflake
- id: "cassandra"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -923,16 +887,16 @@ steps:
tar -C /usr/local -xzf go.tar.gz
export PATH="/usr/local/go/bin:$$PATH"
go test -v ./tests/oracle/... \
go test -v ./internal/sources/oracle/... \
-coverprofile=oracle_coverage.out \
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
# Coverage check
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
echo "Oracle total coverage: $total_coverage"
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 60)}'; then
echo "Coverage failure: $total_coverage is below 60%."
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
echo "Coverage failure: $total_coverage is below 20%."
exit 1
fi
@@ -1000,13 +964,6 @@ steps:
availableSecrets:
secretManager:
# Common secrets
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
env: CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
env: API_KEY
# Resource-specific secrets
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
env: CLOUD_SQL_POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
@@ -1023,6 +980,8 @@ availableSecrets:
env: POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
env: POSTGRES_PASS
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
env: CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
env: NEO4J_USER
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest

View File

@@ -23,18 +23,13 @@ steps:
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
chmod +x .ci/quickstart_test/run_go_tests.sh
.ci/quickstart_test/run_go_tests.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/getting-started/quickstart/go'
- 'TARGET_LANG=go'
- 'TABLE_NAME=hotels_go'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.go'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:

View File

@@ -23,18 +23,13 @@ steps:
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
chmod +x .ci/quickstart_test/run_js_tests.sh
.ci/quickstart_test/run_js_tests.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/getting-started/quickstart/js'
- 'TARGET_LANG=js'
- 'TABLE_NAME=hotels_js'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.js'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:

View File

@@ -23,18 +23,13 @@ steps:
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
chmod +x .ci/quickstart_test/run_py_tests.sh
.ci/quickstart_test/run_py_tests.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/getting-started/quickstart/python'
- 'TARGET_LANG=python'
- 'TABLE_NAME=hotels_python'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.py'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:

View File

@@ -0,0 +1,125 @@
#!/bin/bash
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
TABLE_NAME="hotels_go"
QUICKSTART_GO_DIR="docs/en/getting-started/quickstart/go"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
PROXY_PID=""
TOOLBOX_PID=""
install_system_packages() {
apt-get update && apt-get install -y \
postgresql-client \
wget \
gettext-base \
netcat-openbsd
}
start_cloud_sql_proxy() {
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "Cloud SQL Proxy failed to start within the timeout period."
exit 1
}
setup_toolbox() {
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" &
TOOLBOX_PID=$!
sleep 2
}
setup_orch_table() {
export TABLE_NAME
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}
run_orch_test() {
local orch_dir="$1"
local orch_name
orch_name=$(basename "$orch_dir")
if [ "$orch_name" == "openAI" ]; then
echo -e "\nSkipping framework '${orch_name}': Temporarily excluded."
return
fi
(
set -e
setup_orch_table
echo "--- Preparing module for $orch_name ---"
cd "$orch_dir"
if [ -f "go.mod" ]; then
go mod tidy
fi
cd ..
export ORCH_NAME="$orch_name"
echo "--- Running tests for $orch_name ---"
go test -v ./...
)
}
cleanup_all() {
echo "--- Final cleanup: Shutting down processes and dropping table ---"
if [ -n "$TOOLBOX_PID" ]; then
kill $TOOLBOX_PID || true
fi
if [ -n "$PROXY_PID" ]; then
kill $PROXY_PID || true
fi
}
trap cleanup_all EXIT
# Main script execution
install_system_packages
start_cloud_sql_proxy
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
for ORCH_DIR in "$QUICKSTART_GO_DIR"/*/; do
if [ ! -d "$ORCH_DIR" ]; then
continue
fi
run_orch_test "$ORCH_DIR"
done

View File

@@ -0,0 +1,125 @@
#!/bin/bash
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
TABLE_NAME="hotels_js"
QUICKSTART_JS_DIR="docs/en/getting-started/quickstart/js"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
# Initialize process IDs to empty at the top of the script
PROXY_PID=""
TOOLBOX_PID=""
install_system_packages() {
apt-get update && apt-get install -y \
postgresql-client \
wget \
gettext-base \
netcat-openbsd
}
start_cloud_sql_proxy() {
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "Cloud SQL Proxy failed to start within the timeout period."
exit 1
}
setup_toolbox() {
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" &
TOOLBOX_PID=$!
sleep 2
}
setup_orch_table() {
export TABLE_NAME
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}
run_orch_test() {
local orch_dir="$1"
local orch_name
orch_name=$(basename "$orch_dir")
(
set -e
echo "--- Preparing environment for $orch_name ---"
setup_orch_table
cd "$orch_dir"
echo "Installing dependencies for $orch_name..."
if [ -f "package-lock.json" ]; then
npm ci
else
npm install
fi
cd ..
echo "--- Running tests for $orch_name ---"
export ORCH_NAME="$orch_name"
node --test quickstart.test.js
echo "--- Cleaning environment for $orch_name ---"
rm -rf "${orch_name}/node_modules"
)
}
cleanup_all() {
echo "--- Final cleanup: Shutting down processes and dropping table ---"
if [ -n "$TOOLBOX_PID" ]; then
kill $TOOLBOX_PID || true
fi
if [ -n "$PROXY_PID" ]; then
kill $PROXY_PID || true
fi
}
trap cleanup_all EXIT
# Main script execution
install_system_packages
start_cloud_sql_proxy
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
for ORCH_DIR in "$QUICKSTART_JS_DIR"/*/; do
if [ ! -d "$ORCH_DIR" ]; then
continue
fi
run_orch_test "$ORCH_DIR"
done

View File

@@ -0,0 +1,115 @@
#!/bin/bash
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
TABLE_NAME="hotels_python"
QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
PROXY_PID=""
TOOLBOX_PID=""
install_system_packages() {
apt-get update && apt-get install -y \
postgresql-client \
python3-venv \
wget \
gettext-base \
netcat-openbsd
}
start_cloud_sql_proxy() {
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "Cloud SQL Proxy failed to start within the timeout period."
exit 1
}
setup_toolbox() {
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" &
TOOLBOX_PID=$!
sleep 2
}
setup_orch_table() {
export TABLE_NAME
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}
run_orch_test() {
local orch_dir="$1"
local orch_name
orch_name=$(basename "$orch_dir")
(
set -e
setup_orch_table
cd "$orch_dir"
local VENV_DIR=".venv"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"
pip install -r requirements.txt
echo "--- Running tests for $orch_name ---"
cd ..
ORCH_NAME="$orch_name" pytest
rm -rf "$VENV_DIR"
)
}
cleanup_all() {
echo "--- Final cleanup: Shutting down processes and dropping table ---"
if [ -n "$TOOLBOX_PID" ]; then
kill $TOOLBOX_PID || true
fi
if [ -n "$PROXY_PID" ]; then
kill $PROXY_PID || true
fi
}
trap cleanup_all EXIT
# Main script execution
install_system_packages
start_cloud_sql_proxy
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do
if [ ! -d "$ORCH_DIR" ]; then
continue
fi
run_orch_test "$ORCH_DIR"
done

View File

@@ -1,57 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
steps:
- name: "${_IMAGE}"
id: "py-pre-post-processing-test"
entrypoint: "bash"
args:
- -c
- |
set -ex
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
env:
- "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
- "GCP_PROJECT=${_GCP_PROJECT}"
- "DATABASE_NAME=${_DATABASE_NAME}"
- "DB_USER=${_DB_USER}"
- "TARGET_ROOT=${_TARGET_ROOT}"
- "TARGET_LANG=${_TARGET_LANG}"
- "TABLE_NAME=${_TABLE_NAME}"
- "SQL_FILE=${_SQL_FILE}"
- "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]
availableSecrets:
secretManager:
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5
env: "TOOLS_YAML_CONTENT"
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: "GOOGLE_API_KEY"
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: "DB_PASSWORD"
timeout: 1200s
substitutions:
_TARGET_LANG: "python"
_IMAGE: "gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0"
_TARGET_ROOT: "docs/en/samples/pre_post_processing/python"
_TABLE_NAME: "hotels_py_pre_post_processing"
_SQL_FILE: ".ci/sample_tests/setup_hotels.sql"
_AGENT_FILE_PATTERN: "agent.py"
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,202 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# --- Configuration (from Environment Variables) ---
# TARGET_ROOT: The directory to search for tests (e.g., docs/en/getting-started/quickstart/js)
# TARGET_LANG: python, js, go
# TABLE_NAME: Database table name to use
# SQL_FILE: Path to the SQL setup file
# AGENT_FILE_PATTERN: Filename to look for (e.g., quickstart.js or agent.py)
VERSION=$(cat ./cmd/version.txt)
# Process IDs & Logs
PROXY_PID=""
TOOLBOX_PID=""
PROXY_LOG="cloud_sql_proxy.log"
TOOLBOX_LOG="toolbox_server.log"
install_system_packages() {
echo "Installing system packages..."
apt-get update && apt-get install -y \
postgresql-client \
wget \
gettext-base \
netcat-openbsd
if [[ "$TARGET_LANG" == "python" ]]; then
apt-get install -y python3-venv
fi
}
start_cloud_sql_proxy() {
echo "Starting Cloud SQL Proxy..."
wget -q "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" > "$PROXY_LOG" 2>&1 &
PROXY_PID=$!
# Health Check
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "ERROR: Cloud SQL Proxy failed to start. Logs:"
cat "$PROXY_LOG"
exit 1
}
setup_toolbox() {
echo "Setting up Toolbox server..."
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
wget -q "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" > "$TOOLBOX_LOG" 2>&1 &
TOOLBOX_PID=$!
# Health Check
for i in {1..15}; do
if nc -z 127.0.0.1 5000; then
echo "Toolbox server is up and running."
return
fi
sleep 1
done
echo "ERROR: Toolbox server failed to start. Logs:"
cat "$TOOLBOX_LOG"
exit 1
}
setup_db_table() {
echo "Setting up database table $TABLE_NAME using $SQL_FILE..."
export TABLE_NAME
envsubst < "$SQL_FILE" | psql -h 127.0.0.1 -p 5432 -U "$DB_USER" -d "$DATABASE_NAME"
}
run_python_test() {
local dir=$1
local name=$(basename "$dir")
echo "--- Running Python Test: $name ---"
(
cd "$dir"
python3 -m venv .venv
source .venv/bin/activate
pip install -q -r requirements.txt pytest
cd ..
local test_file=$(find . -maxdepth 1 -name "*test.py" | head -n 1)
if [ -n "$test_file" ]; then
echo "Found native test: $test_file. Running pytest..."
export ORCH_NAME="$name"
export PYTHONPATH="../"
pytest "$test_file"
else
echo "No native test found. running agent directly..."
export PYTHONPATH="../"
python3 "${name}/${AGENT_FILE_PATTERN}"
fi
rm -rf "${name}/.venv"
)
}
run_js_test() {
local dir=$1
local name=$(basename "$dir")
echo "--- Running JS Test: $name ---"
(
cd "$dir"
if [ -f "package-lock.json" ]; then npm ci -q; else npm install -q; fi
cd ..
# Looking for a JS test file in the parent directory
local test_file=$(find . -maxdepth 1 -name "*test.js" | head -n 1)
if [ -n "$test_file" ]; then
echo "Found native test: $test_file. Running node --test..."
export ORCH_NAME="$name"
node --test "$test_file"
else
echo "No native test found. running agent directly..."
node "${name}/${AGENT_FILE_PATTERN}"
fi
rm -rf "${name}/node_modules"
)
}
run_go_test() {
local dir=$1
local name=$(basename "$dir")
if [ "$name" == "openAI" ]; then
echo -e "\nSkipping framework '${name}': Temporarily excluded."
return
fi
echo "--- Running Go Test: $name ---"
(
cd "$dir"
if [ -f "go.mod" ]; then
go mod tidy
fi
cd ..
local test_file=$(find . -maxdepth 1 -name "*test.go" | head -n 1)
if [ -n "$test_file" ]; then
echo "Found native test: $test_file. Running go test..."
export ORCH_NAME="$name"
go test -v ./...
else
echo "No native test found. running agent directly..."
cd "$name"
go run "."
fi
)
}
cleanup() {
echo "Cleaning up background processes..."
[ -n "$TOOLBOX_PID" ] && kill "$TOOLBOX_PID" || true
[ -n "$PROXY_PID" ] && kill "$PROXY_PID" || true
}
trap cleanup EXIT
# --- Execution ---
install_system_packages
start_cloud_sql_proxy
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
setup_db_table
echo "Scanning $TARGET_ROOT for tests with pattern $AGENT_FILE_PATTERN..."
find "$TARGET_ROOT" -name "$AGENT_FILE_PATTERN" | while read -r agent_file; do
sample_dir=$(dirname "$agent_file")
if [[ "$TARGET_LANG" == "python" ]]; then
run_python_test "$sample_dir"
elif [[ "$TARGET_LANG" == "js" ]]; then
run_js_test "$sample_dir"
elif [[ "$TARGET_LANG" == "go" ]]; then
run_go_test "$sample_dir"
fi
done

View File

@@ -40,7 +40,7 @@ jobs:
group: docs-deployment
cancel-in-progress: false
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
@@ -51,12 +51,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -30,14 +30,14 @@ jobs:
steps:
- name: Checkout main branch (for latest templates and theme)
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: 'main'
submodules: 'recursive'
fetch-depth: 0
- name: Checkout old content from tag into a temporary directory
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.inputs.version_tag }}
path: 'old_version_source' # Checkout into a temp subdir
@@ -57,7 +57,7 @@ jobs:
with:
hugo-version: "0.145.0"
extended: true
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

View File

@@ -30,7 +30,7 @@ jobs:
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.release.tag_name }}
@@ -44,7 +44,7 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

View File

@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: versioned-gh-pages

View File

@@ -49,7 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
@@ -62,12 +62,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -22,47 +22,38 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- name: Restore lychee cache
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: .lycheecache
key: cache-lychee-${{ github.sha }}
restore-keys: cache-lychee-
- name: Link Checker
id: lychee-check
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
continue-on-error: true
with:
args: >
--quiet
--verbose
--no-progress
--cache
--max-cache-age 1d
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
README.md
docs/
output: lychee-report.md
format: markdown
fail: true
jobSummary: false
debug: false
output: /tmp/foo.txt
fail: true
jobSummary: true
debug: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Display Failure Report
# Run this ONLY if the link checker failed
if: steps.lychee-check.outcome == 'failure'
# This step only runs if the 'lychee_check' step fails, ensuring the
# context note only appears when the developer needs to troubleshoot.
- name: Display Link Context Note on Failure
if: ${{ failure() }}
run: |
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> $GITHUB_STEP_SUMMARY
echo "---" >> $GITHUB_STEP_SUMMARY
echo "### Broken Links Found" >> $GITHUB_STEP_SUMMARY
cat ./lychee-report.md >> $GITHUB_STEP_SUMMARY
exit 1

View File

@@ -51,11 +51,11 @@ jobs:
console.log('Failed to remove label. Another job may have already removed it!');
}
- name: Setup Go
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
go-version: "1.25"
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- name: Wait for image in Artifact Registry
shell: bash

View File

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -57,12 +57,12 @@ jobs:
}
- name: Setup Go
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
go-version: "1.24"
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

View File

@@ -51,10 +51,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown
[[params.versions]]
version = "v0.26.0"
url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
[[params.versions]]
version = "v0.25.0"
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"

View File

@@ -23,7 +23,8 @@ https://cloud.dgraph.io/login
https://dgraph.io/docs
# MySQL Community downloads and main site (often protected by bot mitigation)
^https?://(.*\.)?mysql\.com/.*
https://dev.mysql.com/downloads/installer/
https://www.mysql.com/
# Claude desktop download link
https://claude.ai/download
@@ -36,9 +37,9 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
^https?://(www\.)?npmjs\.com/.*
https://www.npmjs.com/package/@toolbox-sdk/core
https://www.npmjs.com/package/@toolbox-sdk/adk
https://www.oceanbase.com/
# Ignore social media and blog profiles to reduce external request overhead
https://medium.com/@mcp_toolbox

View File

@@ -1,30 +1,5 @@
# Changelog
## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
### ⚠ BREAKING CHANGES
* Validate tool naming ([#2305](https://github.com/googleapis/genai-toolbox/issues/2305)) ([5054212](https://github.com/googleapis/genai-toolbox/commit/5054212fa43017207fe83275d27b9fbab96e8ab5))
* **tools/cloudgda:** Update description and parameter name for cloudgda tool ([#2288](https://github.com/googleapis/genai-toolbox/issues/2288)) ([6b02591](https://github.com/googleapis/genai-toolbox/commit/6b025917032394a66840488259db8ff2c3063016))
### Features
* Add new `user-agent-metadata` flag ([#2302](https://github.com/googleapis/genai-toolbox/issues/2302)) ([adc9589](https://github.com/googleapis/genai-toolbox/commit/adc9589766904d9e3cbe0a6399222f8d4bb9d0cc))
* Add remaining flag to Toolbox server in MCP registry ([#2272](https://github.com/googleapis/genai-toolbox/issues/2272)) ([5e0999e](https://github.com/googleapis/genai-toolbox/commit/5e0999ebf5cdd9046e96857738254b2e0561b6d2))
* **embeddingModel:** Add embedding model to MCP handler ([#2310](https://github.com/googleapis/genai-toolbox/issues/2310)) ([e4f60e5](https://github.com/googleapis/genai-toolbox/commit/e4f60e56335b755ef55b9553d3f40b31858ec8d9))
* **sources/bigquery:** Make maximum rows returned from queries configurable ([#2262](https://github.com/googleapis/genai-toolbox/issues/2262)) ([4abf0c3](https://github.com/googleapis/genai-toolbox/commit/4abf0c39e717d53b22cc61efb65e09928c598236))
* **prebuilt/cloud-sql:** Add create backup tool for Cloud SQL ([#2141](https://github.com/googleapis/genai-toolbox/issues/2141)) ([8e0fb03](https://github.com/googleapis/genai-toolbox/commit/8e0fb0348315a80f63cb47b3c7204869482448f4))
* **prebuilt/cloud-sql:** Add restore backup tool for Cloud SQL ([#2171](https://github.com/googleapis/genai-toolbox/issues/2171)) ([00c3e6d](https://github.com/googleapis/genai-toolbox/commit/00c3e6d8cba54e2ab6cb271c7e6b378895df53e1))
* Support combining multiple prebuilt configurations ([#2295](https://github.com/googleapis/genai-toolbox/issues/2295)) ([e535b37](https://github.com/googleapis/genai-toolbox/commit/e535b372ea81864d644a67135a1b07e4e519b4b4))
* Support MCP specs version 2025-11-25 ([#2303](https://github.com/googleapis/genai-toolbox/issues/2303)) ([4d23a3b](https://github.com/googleapis/genai-toolbox/commit/4d23a3bbf2797b1f7fe328aeb5789e778121da23))
* **tools:** Add `valueFromParam` support to Tool config ([#2333](https://github.com/googleapis/genai-toolbox/issues/2333)) ([15101b1](https://github.com/googleapis/genai-toolbox/commit/15101b1edbe2b85a4a5f9f819c23cf83138f4ee1))
### Bug Fixes
* **tools/cloudhealthcare:** Add check for client authorization before retrieving token string ([#2327](https://github.com/googleapis/genai-toolbox/issues/2327)) ([c25a233](https://github.com/googleapis/genai-toolbox/commit/c25a2330fea2ac382a398842c9e572e4e19bcb08))
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)

View File

@@ -83,7 +83,7 @@ Tool type serves as a category or type that a user can assign to a tool.
The following guidelines apply to tool types:
* Should use hyphens over underscores (e.g. `firestore-list-collections` or
* Should user hyphens over underscores (e.g. `firestore-list-collections` or
`firestore_list_colelctions`).
* Should use product name in name (e.g. `firestore-list-collections` over
`list-collections`).
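
For illustration, a hypothetical tool definition that follows these guidelines (hyphenated, product-prefixed type name; the tool and source names here are made up, using the v1 `kind` field):

```yaml
tools:
  list-hotel-collections:
    kind: firestore-list-collections  # hyphens, and the product name is included
    source: my-firestore-source
    description: "Lists collections in the hotels Firestore database."
```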

View File

@@ -2,8 +2,6 @@
# MCP Toolbox for Databases
<a href="https://trendshift.io/repositories/13019" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13019" alt="googleapis%2Fgenai-toolbox | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
[![Docs](https://img.shields.io/badge/docs-MCP_Toolbox-blue)](https://googleapis.github.io/genai-toolbox/)
[![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?style=flat&logo=discord&logoColor=white)](https://discord.gg/Dmm69peqjh)
[![Medium](https://img.shields.io/badge/Medium-12100E?style=flat&logo=medium&logoColor=white)](https://medium.com/@mcp_toolbox)
@@ -107,7 +105,7 @@ redeploying your application.
## Getting Started
### Quickstart: Running Toolbox using NPX
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](#configuration):
@@ -142,7 +140,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.26.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```
@@ -155,7 +153,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.26.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```
@@ -168,7 +166,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.26.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```
@@ -181,7 +179,7 @@ To install Toolbox as a binary:
>
> ```cmd
> :: see releases page for other versions
> set VERSION=0.26.0
> set VERSION=0.25.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
@@ -193,7 +191,7 @@ To install Toolbox as a binary:
>
> ```powershell
> # see releases page for other versions
> $VERSION = "0.26.0"
> $VERSION = "0.25.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> ```
>
@@ -206,7 +204,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.26.0
export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -230,7 +228,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.26.0
go install github.com/googleapis/genai-toolbox@v0.25.0
```
<!-- {x-release-please-end} -->

View File

@@ -1,131 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"os"
"path/filepath"
"strings"
"testing"
)
func TestInvokeTool(t *testing.T) {
// Create a temporary tools file
tmpDir := t.TempDir()
toolsFileContent := `
sources:
my-sqlite:
kind: sqlite
database: test.db
tools:
hello-sqlite:
kind: sqlite-sql
source: my-sqlite
description: "hello tool"
statement: "SELECT 'hello' as greeting"
echo-tool:
kind: sqlite-sql
source: my-sqlite
description: "echo tool"
statement: "SELECT ? as msg"
parameters:
- name: message
type: string
description: message to echo
`
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
tcs := []struct {
desc string
args []string
want string
wantErr bool
errStr string
}{
{
desc: "success - basic tool call",
args: []string{"invoke", "hello-sqlite", "--tools-file", toolsFilePath},
want: `"greeting": "hello"`,
},
{
desc: "success - tool call with parameters",
args: []string{"invoke", "echo-tool", `{"message": "world"}`, "--tools-file", toolsFilePath},
want: `"msg": "world"`,
},
{
desc: "error - tool not found",
args: []string{"invoke", "non-existent", "--tools-file", toolsFilePath},
wantErr: true,
errStr: `tool "non-existent" not found`,
},
{
desc: "error - invalid JSON params",
args: []string{"invoke", "echo-tool", `invalid-json`, "--tools-file", toolsFilePath},
wantErr: true,
errStr: `params must be a valid JSON string`,
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
_, got, err := invokeCommandWithContext(context.Background(), tc.args)
if (err != nil) != tc.wantErr {
t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
}
if tc.wantErr && !strings.Contains(err.Error(), tc.errStr) {
t.Fatalf("got error %v, want error containing %q", err, tc.errStr)
}
if !tc.wantErr && !strings.Contains(got, tc.want) {
t.Fatalf("got %q, want it to contain %q", got, tc.want)
}
})
}
}
func TestInvokeTool_AuthUnsupported(t *testing.T) {
tmpDir := t.TempDir()
toolsFileContent := `
sources:
my-bq:
kind: bigquery
project: my-project
useClientOAuth: true
tools:
bq-tool:
kind: bigquery-sql
source: my-bq
description: "bq tool"
statement: "SELECT 1"
`
toolsFilePath := filepath.Join(tmpDir, "auth_tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath}
_, _, err := invokeCommandWithContext(context.Background(), args)
if err == nil {
t.Fatal("expected error for tool requiring client auth, but got nil")
}
if !strings.Contains(err.Error(), "client authorization is not supported") {
t.Fatalf("unexpected error message: %v", err)
}
}

View File

@@ -34,8 +34,6 @@ import (
"github.com/fsnotify/fsnotify"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/cli/invoke"
"github.com/googleapis/genai-toolbox/internal/cli/skills"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
@@ -93,9 +91,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
@@ -104,7 +99,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
@@ -132,9 +126,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectdirectory"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectdirectory"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
@@ -151,7 +143,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectdirectories"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
@@ -166,7 +157,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
@@ -253,7 +243,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
@@ -326,15 +315,15 @@ func Execute() {
type Command struct {
*cobra.Command
cfg server.ServerConfig
logger log.Logger
tools_file string
tools_files []string
tools_folder string
prebuiltConfigs []string
inStream io.Reader
outStream io.Writer
errStream io.Writer
cfg server.ServerConfig
logger log.Logger
tools_file string
tools_files []string
tools_folder string
prebuiltConfig string
inStream io.Reader
outStream io.Writer
errStream io.Writer
}
// NewCommand returns a Command object representing an invocation of the CLI.
@@ -371,44 +360,36 @@ func NewCommand(opts ...Option) *Command {
baseCmd.SetErr(cmd.errStream)
flags := cmd.Flags()
persistentFlags := cmd.PersistentFlags()
flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
// deprecate tools_file
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
persistentFlags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
persistentFlags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
persistentFlags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
persistentFlags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
persistentFlags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
persistentFlags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
persistentFlags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
persistentFlags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
// Fetch prebuilt tools sources to customize the help description
prebuiltHelp := fmt.Sprintf(
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
)
persistentFlags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
// TODO: Insecure by default. Might consider updating this for v1.0.0
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
persistentFlags.StringSliceVar(&cmd.cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
// wrap RunE command so that we have access to original Command object
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
// Register subcommands for tool invocation
baseCmd.AddCommand(invoke.NewCommand(cmd))
// Register subcommands for skill generation
baseCmd.AddCommand(skills.NewCommand(cmd))
return cmd
}
@@ -444,124 +425,101 @@ func parseEnv(input string) (string, error) {
return output, err
}
func convertToolsFile(raw []byte) ([]byte, error) {
func convertToolsFile(ctx context.Context, raw []byte) ([]byte, error) {
var input yaml.MapSlice
decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
if err := decoder.Decode(&input); err != nil {
return nil, err
}
// Convert raw MapSlice to a helper map for quick lookup
// while keeping the values as MapSlices to preserve internal order
resourceOrder := []string{}
lookup := make(map[string]yaml.MapSlice)
for _, item := range input {
key, ok := item.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
}
if slice, ok := item.Value.(yaml.MapSlice); ok {
// convert authSources to authServices
if key == "authSources" {
key = "authServices"
}
// works even if lookup[key] is nil
lookup[key] = append(lookup[key], slice...)
// preserving the resource's order of original toolsFile
if !slices.Contains(resourceOrder, key) {
resourceOrder = append(resourceOrder, key)
}
} else {
// toolsfile is already v2
if key == "kind" {
return raw, nil
}
return nil, fmt.Errorf("'%s' is not a map", key)
}
}
// convert to tools file v2
var buf bytes.Buffer
encoder := yaml.NewEncoder(&buf)
v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
for {
if err := decoder.Decode(&input); err != nil {
if err == io.EOF {
break
}
return nil, err
for _, kind := range resourceOrder {
data, exists := lookup[kind]
if !exists {
// if this is skipped for all keys, the tools file is in v2
continue
}
for _, item := range input {
key, ok := item.Key.(string)
// Transform each entry
for _, entry := range data {
entryName, ok := entry.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
}
// check if the key is config file v1's key
if slices.Contains(v1keys, key) {
// check if value conversion to yaml.MapSlice successfully
// fields such as "tools" in toolsets might pass the first check but
// fail to convert to MapSlice
if slice, ok := item.Value.(yaml.MapSlice); ok {
// Deprecated: convert authSources to authServices
if key == "authSources" {
key = "authServices"
}
transformed, err := transformDocs(key, slice)
if err != nil {
return nil, err
}
// encode per-doc
for _, doc := range transformed {
if err := encoder.Encode(doc); err != nil {
return nil, err
}
}
} else {
// invalid input will be ignored
// we don't want to throw error here since the config could
// be valid but with a different order such as:
// ---
// tools:
// - tool_a
// kind: toolsets
// ---
continue
}
entryBody := ProcessValue(entry.Value, kind == "toolsets")
transformed := yaml.MapSlice{
{Key: "kind", Value: kind},
{Key: "name", Value: entryName},
}
// Merge the transformed body into our result
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
transformed = append(transformed, bodySlice...)
} else {
// this doc is already v2, encode to buf
if err := encoder.Encode(input); err != nil {
return nil, err
}
break
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
}
if err := encoder.Encode(transformed); err != nil {
return nil, err
}
}
}
return buf.Bytes(), nil
}
// transformDocs transforms the configuration file from v1 format to v2
// yaml.MapSlice will preserve the order in a map
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
var transformed []yaml.MapSlice
for _, entry := range input {
entryName, ok := entry.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
}
entryBody := ProcessValue(entry.Value, kind == "toolsets")
currentTransformed := yaml.MapSlice{
{Key: "kind", Value: kind},
{Key: "name", Value: entryName},
}
// Merge the transformed body into our result
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
currentTransformed = append(currentTransformed, bodySlice...)
} else {
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
}
transformed = append(transformed, currentTransformed)
}
return transformed, nil
}
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
func ProcessValue(v any, isToolset bool) any {
switch val := v.(type) {
case yaml.MapSlice:
// creating a new MapSlice is safer for recursive transformation
newVal := make(yaml.MapSlice, len(val))
for i, item := range val {
for i := range val {
// Perform renaming
if item.Key == "kind" {
item.Key = "type"
if val[i].Key == "kind" {
val[i].Key = "type"
}
// Recursive call for nested values (e.g., nested objects or lists)
item.Value = ProcessValue(item.Value, false)
newVal[i] = item
val[i].Value = ProcessValue(val[i].Value, false)
}
return newVal
return val
case []any:
// Process lists: If it's a toolset top-level list, wrap it.
if isToolset {
return yaml.MapSlice{{Key: "tools", Value: val}}
}
// Otherwise, recurse into list items (to catch nested objects)
newVal := make([]any, len(val))
for i := range val {
newVal[i] = ProcessValue(val[i], false)
val[i] = ProcessValue(val[i], false)
}
return newVal
return val
default:
return val
}
@@ -577,7 +535,7 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
}
raw = []byte(output)
raw, err = convertToolsFile(raw)
raw, err = convertToolsFile(ctx, raw)
if err != nil {
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
}
@@ -932,183 +890,6 @@ func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder str
return watchDirs, watchedFiles
}
func (cmd *Command) Config() server.ServerConfig {
return cmd.cfg
}
func (cmd *Command) Out() io.Writer {
return cmd.outStream
}
func (cmd *Command) Logger() log.Logger {
return cmd.logger
}
func (cmd *Command) LoadConfig(ctx context.Context) error {
logger, err := util.LoggerFromContext(ctx)
if err != nil {
return err
}
var allToolsFiles []ToolsFile
// Load Prebuilt Configuration
if len(cmd.prebuiltConfigs) > 0 {
slices.Sort(cmd.prebuiltConfigs)
sourcesList := strings.Join(cmd.prebuiltConfigs, ", ")
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
logger.InfoContext(ctx, logMsg)
for _, configName := range cmd.prebuiltConfigs {
buf, err := prebuiltconfigs.Get(configName)
if err != nil {
logger.ErrorContext(ctx, err.Error())
return err
}
// Parse into ToolsFile struct
parsed, err := parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
allToolsFiles = append(allToolsFiles, parsed)
}
}
// Determine if Custom Files should be loaded
// Check for explicit custom flags
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured
if useDefaultToolsFile {
cmd.tools_file = "tools.yaml"
isCustomConfigured = true
}
// Load Custom Configurations
if isCustomConfigured {
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
(cmd.tools_file != "" && cmd.tools_folder != "") ||
(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
var customTools ToolsFile
var err error
if len(cmd.tools_files) > 0 {
// Use tools-files
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
} else if cmd.tools_folder != "" {
// Use tools-folder
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
} else {
// Use single file (tools-file or default `tools.yaml`)
buf, readFileErr := os.ReadFile(cmd.tools_file)
if readFileErr != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
customTools, err = parseToolsFile(ctx, buf)
if err != nil {
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
}
}
if err != nil {
logger.ErrorContext(ctx, err.Error())
return err
}
allToolsFiles = append(allToolsFiles, customTools)
}
// Modify version string based on loaded configurations
if len(cmd.prebuiltConfigs) > 0 {
tag := "prebuilt"
if isCustomConfigured {
tag = "custom"
}
// cmd.prebuiltConfigs is already sorted above
for _, configName := range cmd.prebuiltConfigs {
cmd.cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
}
}
// Merge Everything
// This will error if custom tools collide with prebuilt tools
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
if err != nil {
logger.ErrorContext(ctx, err.Error())
return err
}
cmd.cfg.SourceConfigs = finalToolsFile.Sources
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
cmd.cfg.ToolConfigs = finalToolsFile.Tools
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
return nil
}
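// Flag behavior sketch (binary name assumed; flags and defaults as defined in
// this file): the three custom-config flags are mutually exclusive, --prebuilt
// composes with any one of them, and with no flags at all the default
// tools.yaml is loaded:
//
//	toolbox --prebuilt alloydb-postgres --tools-file extra.yaml  // merged; name collisions error
//	toolbox --tools-file a.yaml --tools-folder ./configs         // rejected: flags cannot be used simultaneously
//	toolbox                                                      // falls back to tools.yaml in the working directory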
func (cmd *Command) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) {
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream
loggerOut := cmd.outStream
if cmd.cfg.Stdio {
loggerOut = cmd.errStream
}
// Handle logger separately from config
logger, err := log.NewLogger(cmd.cfg.LoggingFormat.String(), cmd.cfg.LogLevel.String(), loggerOut, cmd.errStream)
if err != nil {
return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err)
}
cmd.logger = logger
ctx = util.WithLogger(ctx, cmd.logger)
// Set up OpenTelemetry
otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
if err != nil {
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return ctx, nil, errMsg
}
shutdownFunc := func(ctx context.Context) error {
err := otelShutdown(ctx)
if err != nil {
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return err
}
return nil
}
instrumentation, err := telemetry.CreateTelemetryInstrumentation(cmd.cfg.Version)
if err != nil {
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return ctx, shutdownFunc, errMsg
}
ctx = util.WithInstrumentation(ctx, instrumentation)
return ctx, shutdownFunc, nil
}
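// A minimal sketch of the intended call order for these helpers, matching the
// refactored run() below:
//
//	ctx, shutdown, err := cmd.Setup(ctx) // logger + OpenTelemetry
//	if err != nil {
//		return err
//	}
//	defer func() { _ = shutdown(ctx) }()
//	if err := cmd.LoadConfig(ctx); err != nil {
//		return err
//	}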
func run(cmd *Command) error {
ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()
@@ -1133,18 +914,149 @@ func run(cmd *Command) error {
cancel()
}(ctx)
ctx, shutdown, err := cmd.Setup(ctx)
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream
loggerOut := cmd.outStream
if cmd.cfg.Stdio {
loggerOut = cmd.errStream
}
// Handle logger separately from config
switch strings.ToLower(cmd.cfg.LoggingFormat.String()) {
case "json":
logger, err := log.NewStructuredLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
if err != nil {
return fmt.Errorf("unable to initialize logger: %w", err)
}
cmd.logger = logger
case "standard":
logger, err := log.NewStdLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
if err != nil {
return fmt.Errorf("unable to initialize logger: %w", err)
}
cmd.logger = logger
default:
return fmt.Errorf("logging format invalid")
}
ctx = util.WithLogger(ctx, cmd.logger)
// Set up OpenTelemetry
otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
if err != nil {
return err
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
defer func() {
_ = shutdown(ctx)
err := otelShutdown(ctx)
if err != nil {
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
}
}()
if err := cmd.LoadConfig(ctx); err != nil {
var allToolsFiles []ToolsFile
// Load Prebuilt Configuration
if cmd.prebuiltConfig != "" {
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
cmd.logger.InfoContext(ctx, logMsg)
// Append prebuilt.source to Version string for the User Agent
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
parsed, err := parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
allToolsFiles = append(allToolsFiles, parsed)
}
// Determine if Custom Files should be loaded
// Check for explicit custom flags
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
if useDefaultToolsFile {
cmd.tools_file = "tools.yaml"
isCustomConfigured = true
}
// Load Custom Configurations
if isCustomConfigured {
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
(cmd.tools_file != "" && cmd.tools_folder != "") ||
(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
var customTools ToolsFile
var err error
if len(cmd.tools_files) > 0 {
// Use tools-files
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
} else if cmd.tools_folder != "" {
// Use tools-folder
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
} else {
// Use single file (tools-file or default `tools.yaml`)
buf, readFileErr := os.ReadFile(cmd.tools_file)
if readFileErr != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
customTools, err = parseToolsFile(ctx, buf)
if err != nil {
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
}
}
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
allToolsFiles = append(allToolsFiles, customTools)
}
// Merge Everything
// This will error if custom tools collide with prebuilt tools
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
cmd.cfg.SourceConfigs = finalToolsFile.Sources
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
cmd.cfg.ToolConfigs = finalToolsFile.Tools
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
if err != nil {
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
ctx = util.WithInstrumentation(ctx, instrumentation)
// start server
s, err := server.NewServer(ctx, cmd.cfg)
if err != nil {
@@ -1184,9 +1096,6 @@ func run(cmd *Command) error {
}()
}
// Determine if Custom Files are configured (re-check as loadAndMergeConfig might have updated defaults)
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
if isCustomConfigured && !cmd.cfg.DisableReload {
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
// start watching the file(s) or folder for changes to trigger dynamic reloading

View File

@@ -23,12 +23,14 @@ import (
"os"
"path"
"path/filepath"
"reflect"
"regexp"
"runtime"
"strings"
"testing"
"time"
yaml "github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/auth/google"
@@ -70,9 +72,6 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
if c.AllowedHosts == nil {
c.AllowedHosts = []string{"*"}
}
if c.UserAgentMetadata == nil {
c.UserAgentMetadata = []string{}
}
return c
}
@@ -233,13 +232,6 @@ func TestServerConfigFlags(t *testing.T) {
AllowedHosts: []string{"http://foo.com", "http://bar.com"},
}),
},
{
desc: "user agent metadata",
args: []string{"--user-agent-metadata", "foo,bar"},
want: withDefaults(server.ServerConfig{
UserAgentMetadata: []string{"foo", "bar"},
}),
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
@@ -430,27 +422,17 @@ func TestPrebuiltFlag(t *testing.T) {
tcs := []struct {
desc string
args []string
want []string
want string
}{
{
desc: "default value",
args: []string{},
want: []string{},
want: "",
},
{
desc: "single prebuilt flag",
args: []string{"--prebuilt", "alloydb"},
want: []string{"alloydb"},
},
{
desc: "multiple prebuilt flags",
args: []string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"},
want: []string{"alloydb", "bigquery"},
},
{
desc: "comma separated prebuilt flags",
args: []string{"--prebuilt", "alloydb,bigquery"},
want: []string{"alloydb", "bigquery"},
desc: "custom pre built flag",
args: []string{"--tools-file", "alloydb"},
want: "alloydb",
},
}
for _, tc := range tcs {
@@ -459,8 +441,8 @@ func TestPrebuiltFlag(t *testing.T) {
if err != nil {
t.Fatalf("unexpected error invoking command: %s", err)
}
if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" {
t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff)
if c.tools_file != tc.want {
t.Fatalf("got %v, want %v", c.cfg, tc.want)
}
})
}
@@ -515,6 +497,18 @@ func TestDefaultLogLevel(t *testing.T) {
}
func TestConvertToolsFile(t *testing.T) {
ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
defer cancelCtx()
pr, pw := io.Pipe()
defer pw.Close()
defer pr.Close()
logger, err := log.NewStdLogger(pw, pw, "DEBUG")
if err != nil {
t.Fatalf("failed to setup logger %s", err)
}
ctx = util.WithLogger(ctx, logger)
tcs := []struct {
desc string
in string
@@ -543,7 +537,8 @@ func TestConvertToolsFile(t *testing.T) {
kind: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
@@ -565,7 +560,8 @@ func TestConvertToolsFile(t *testing.T) {
model: gemini-embedding-001
apiKey: some-key
dimension: 768`,
want: `kind: sources
want: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
@@ -585,7 +581,8 @@ name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
@@ -610,18 +607,18 @@ name: gemini-model
type: gemini
model: gemini-embedding-001
apiKey: some-key
dimension: 768
`,
dimension: 768`,
},
{
desc: "preserve resource order",
desc: "preserve resource order with grouping",
in: `
tools:
example_tool:
kind: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
@@ -643,136 +640,23 @@ dimension: 768
example_toolset:
- example_tool
authSources:
my-google-auth2:
kind: google
clientId: testing-id`,
want: `kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
---
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
---
kind: authServices
name: my-google-auth
type: google
clientId: testing-id
---
kind: toolsets
name: example_toolset
tools:
- example_tool
---
kind: authServices
name: my-google-auth2
type: google
clientId: testing-id
`,
},
{
desc: "convert combination of v1 and v2",
in: `
sources:
my-pg-instance:
kind: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
authServices:
my-google-auth:
kind: google
clientId: testing-id
tools:
example_tool:
kind: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
toolsets:
example_toolset:
- example_tool
prompts:
code_review:
description: ask llm to analyze code quality
messages:
- content: "please review the following code for quality: {{.code}}"
arguments:
- name: code
description: the code to review
embeddingModels:
gemini-model:
kind: gemini
model: gemini-embedding-001
apiKey: some-key
dimension: 768
clientId: testing-id`,
want: `
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
---
kind: sources
name: my-pg-instance2
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
---
kind: authServices
name: my-google-auth2
type: google
clientId: testing-id
---
kind: tools
name: example_tool2
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
---
kind: toolsets
name: example_toolset2
tools:
- example_tool
---
tools:
- example_tool
kind: toolsets
name: example_toolset3
---
kind: prompts
name: code_review2
description: ask llm to analyze code quality
messages:
- content: "please review the following code for quality: {{.code}}"
arguments:
- name: code
description: the code to review
---
kind: embeddingModels
name: gemini-model2
type: gemini`,
want: `kind: sources
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
@@ -787,88 +671,20 @@ name: my-google-auth
type: google
clientId: testing-id
---
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
kind: authServices
name: my-google-auth
type: google
clientId: testing-id
---
kind: toolsets
name: example_toolset
tools:
- example_tool
---
kind: prompts
name: code_review
description: ask llm to analyze code quality
messages:
- content: "please review the following code for quality: {{.code}}"
arguments:
- name: code
description: the code to review
---
kind: embeddingModels
name: gemini-model
type: gemini
model: gemini-embedding-001
apiKey: some-key
dimension: 768
---
kind: sources
name: my-pg-instance2
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
---
kind: authServices
name: my-google-auth2
type: google
clientId: testing-id
---
kind: tools
name: example_tool2
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
description: some description
---
kind: toolsets
name: example_toolset2
tools:
- example_tool
---
tools:
- example_tool
kind: toolsets
name: example_toolset3
---
kind: prompts
name: code_review2
description: ask llm to analyze code quality
messages:
- content: "please review the following code for quality: {{.code}}"
arguments:
- name: code
description: the code to review
---
kind: embeddingModels
name: gemini-model2
type: gemini
`,
- example_tool`,
},
{
desc: "no convertion needed",
in: `kind: sources
in: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
@@ -883,7 +699,8 @@ name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
@@ -893,7 +710,8 @@ kind: toolsets
name: example_toolset
tools:
- example_tool`,
want: `kind: sources
want: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
@@ -908,7 +726,8 @@ name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: SELECT * FROM SQL_STATEMENT;
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: country
type: string
@@ -917,34 +736,69 @@ parameters:
kind: toolsets
name: example_toolset
tools:
- example_tool
`,
- example_tool`,
},
{
desc: "invalid source",
in: `sources: invalid`,
want: "",
desc: "invalid source",
in: `sources: invalid`,
isErr: true,
errStr: "'sources' is not a map",
},
{
desc: "invalid toolset",
in: `toolsets: invalid`,
want: "",
desc: "invalid toolset",
in: `toolsets: invalid`,
isErr: true,
errStr: "'toolsets' is not a map",
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
output, err := convertToolsFile([]byte(tc.in))
output, err := convertToolsFile(ctx, []byte(tc.in))
if tc.isErr {
if err == nil {
t.Fatalf("missing error: %s", tc.errStr)
}
if err.Error() != tc.errStr {
t.Fatalf("invalid error string: got %s, want %s", err, tc.errStr)
}
return
}
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
if diff := cmp.Diff(string(output), tc.want); diff != "" {
t.Fatalf("incorrect toolsets parse: diff %v", diff)
var docs1, docs2 []yaml.MapSlice
if docs1, err = decodeToMapSlice(string(output)); err != nil {
t.Fatalf("error decoding output: %s", err)
}
if docs2, err = decodeToMapSlice(tc.want); err != nil {
t.Fatalf("Error decoding want: %s", err)
}
if !reflect.DeepEqual(docs1, docs2) {
t.Fatalf("incorrect output: got %s, want %s", string(output), tc.want)
}
})
}
}
func decodeToMapSlice(data string) ([]yaml.MapSlice, error) {
// decodeToMapSlice decodes a multi-document YAML stream into MapSlices, preserving key order within each document.
var docs []yaml.MapSlice
decoder := yaml.NewDecoder(strings.NewReader(data))
for {
var doc yaml.MapSlice
err := decoder.Decode(&doc)
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
docs = append(docs, doc)
}
return docs, nil
}
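// Usage note: comparing decoded []yaml.MapSlice values rather than raw strings
// keeps the assertions above insensitive to formatting-only differences
// (indentation, `|` block scalars, trailing newlines) while still failing if
// document order or key order changes. A small sketch:
//
//	got, _ := decodeToMapSlice("a: 1\nb: 2")
//	want, _ := decodeToMapSlice("a: 1\nb: 2\n")
//	reflect.DeepEqual(got, want) // true: only the trailing newline differs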
func TestParseToolFile(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
@@ -1015,8 +869,7 @@ func TestParseToolFile(t *testing.T) {
ToolNames: []string{"example_tool"},
},
},
AuthServices: nil,
Prompts: nil,
},
},
{
@@ -1120,7 +973,7 @@ func TestParseToolFile(t *testing.T) {
},
},
Prompts: server.PromptConfigs{
"code_review": &custom.Config{
"code_review": custom.Config{
Name: "code_review",
Description: "ask llm to analyze code quality",
Arguments: prompts.Arguments{
@@ -1138,12 +991,12 @@ func TestParseToolFile(t *testing.T) {
in: `
kind: prompts
name: my-prompt
description: A prompt template for data analysis.
arguments:
- name: country
description: The country to analyze.
messages:
- content: Analyze the data for {{.country}}.
`,
wantToolsFile: ToolsFile{
Sources: nil,
@@ -1213,17 +1066,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
database: my_db
user: my_user
password: my_pass
---
kind: authServices
name: my-google-service
type: google
clientId: my-client-id
---
kind: authServices
name: other-google-service
type: google
clientId: other-client-id
---
kind: tools
name: example_tool
type: postgres-sql
@@ -1249,7 +1102,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
field: email
- name: other-google-service
field: other_email
---
kind: toolsets
name: example_toolset
tools:
@@ -1417,17 +1270,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
database: my_db
user: my_user
password: my_pass
---
kind: authServices
name: my-google-service
type: google
clientId: my-client-id
---
kind: authServices
name: other-google-service
type: google
clientId: other-client-id
---
kind: tools
name: example_tool
type: postgres-sql
@@ -1455,7 +1308,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
field: email
- name: other-google-service
field: other_email
---
kind: toolsets
name: example_toolset
tools:
@@ -1624,7 +1477,7 @@ func TestEnvVarReplacement(t *testing.T) {
${toolset_name}:
- example_tool
prompts:
${prompt_name}:
description: A test prompt for {{.name}}.
@@ -1714,17 +1567,17 @@ func TestEnvVarReplacement(t *testing.T) {
Authorization: ${TestHeader}
queryParams:
api-key: ${API_KEY}
---
kind: authServices
name: my-google-service
type: google
clientId: ${clientId}
---
kind: authServices
name: other-google-service
type: google
clientId: ${clientId2}
---
kind: tools
name: example_tool
type: http
@@ -1765,12 +1618,12 @@ func TestEnvVarReplacement(t *testing.T) {
- name: Language
type: string
description: language string
---
kind: toolsets
name: ${toolset_name}
tools:
- example_tool
---
kind: prompts
name: ${prompt_name}
description: A test prompt for {{.name}}.
@@ -2054,7 +1907,6 @@ func TestSingleEdit(t *testing.T) {
func TestPrebuiltTools(t *testing.T) {
// Get prebuilt configs
alloydb_omni_config, _ := prebuiltconfigs.Get("alloydb-omni")
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
@@ -2105,12 +1957,6 @@ func TestPrebuiltTools(t *testing.T) {
t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user")
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password")
t.Setenv("ALLOYDB_OMNI_HOST", "localhost")
t.Setenv("ALLOYDB_OMNI_PORT", "5432")
t.Setenv("ALLOYDB_OMNI_DATABASE", "your_alloydb_db")
t.Setenv("ALLOYDB_OMNI_USER", "your_alloydb_user")
t.Setenv("ALLOYDB_OMNI_PASSWORD", "your_alloydb_password")
t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol")
t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database")
t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password")
@@ -2204,16 +2050,6 @@ func TestPrebuiltTools(t *testing.T) {
in []byte
wantToolset server.ToolsetConfigs
}{
{
name: "alloydb omni prebuilt tools",
in: alloydb_omni_config,
wantToolset: server.ToolsetConfigs{
"alloydb_omni_database_tools": tools.ToolsetConfig{
Name: "alloydb_omni_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_columnar_configurations", "list_columnar_recommended_columns", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
},
},
},
{
name: "alloydb postgres admin prebuilt tools",
in: alloydb_admin_config,
@@ -2230,7 +2066,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
},
},
},
@@ -2240,7 +2076,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
},
},
},
@@ -2250,7 +2086,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
},
},
},
@@ -2370,7 +2206,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"looker_tools": tools.ToolsetConfig{
Name: "looker_tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "validate_project", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
},
},
},
@@ -2765,7 +2601,6 @@ description: "Dummy"
---
kind: toolsets
name: sqlite_database_tools
tools:
- dummy_tool
`
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
@@ -2806,12 +2641,6 @@ authSources:
return nil
},
},
{
desc: "sqlite called twice error",
args: []string{"--prebuilt", "sqlite", "--prebuilt", "sqlite"},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "tool conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
@@ -2920,115 +2749,3 @@ func TestDefaultToolsFileBehavior(t *testing.T) {
})
}
}
func TestParameterReferenceValidation(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
// Base template
baseYaml := `
sources:
dummy-source:
kind: http
baseUrl: http://example.com
tools:
test-tool:
kind: postgres-sql
source: dummy-source
description: test tool
statement: SELECT 1;
parameters:
%s`
tcs := []struct {
desc string
params string
wantErr bool
errSubstr string
}{
{
desc: "valid backward reference",
params: `
- name: source_param
type: string
description: source
- name: copy_param
type: string
description: copy
valueFromParam: source_param`,
wantErr: false,
},
{
desc: "valid forward reference (out of order)",
params: `
- name: copy_param
type: string
description: copy
valueFromParam: source_param
- name: source_param
type: string
description: source`,
wantErr: false,
},
{
desc: "invalid missing reference",
params: `
- name: copy_param
type: string
description: copy
valueFromParam: non_existent_param`,
wantErr: true,
errSubstr: "references '\"non_existent_param\"' in the 'valueFromParam' field",
},
{
desc: "invalid self reference",
params: `
- name: myself
type: string
description: self
valueFromParam: myself`,
wantErr: true,
errSubstr: "parameter \"myself\" cannot copy value from itself",
},
{
desc: "multiple valid references",
params: `
- name: a
type: string
description: a
- name: b
type: string
description: b
valueFromParam: a
- name: c
type: string
description: c
valueFromParam: a`,
wantErr: false,
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
// Indent parameters to match YAML structure
yamlContent := fmt.Sprintf(baseYaml, tc.params)
_, err := parseToolsFile(ctx, []byte(yamlContent))
if tc.wantErr {
if err == nil {
t.Fatal("expected error, got nil")
}
if !strings.Contains(err.Error(), tc.errSubstr) {
t.Errorf("error %q does not contain expected substring %q", err.Error(), tc.errSubstr)
}
} else {
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
}
})
}
}

View File

@@ -1,179 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"os"
"path/filepath"
"strings"
"testing"
"time"
)
func TestGenerateSkill(t *testing.T) {
// Create a temporary directory for tests
tmpDir := t.TempDir()
outputDir := filepath.Join(tmpDir, "skills")
// Create a tools.yaml file with a sqlite tool
toolsFileContent := `
sources:
my-sqlite:
kind: sqlite
database: test.db
tools:
hello-sqlite:
kind: sqlite-sql
source: my-sqlite
description: "hello tool"
statement: "SELECT 'hello' as greeting"
`
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
args := []string{
"skills-generate",
"--tools-file", toolsFilePath,
"--output-dir", outputDir,
"--name", "hello-sqlite",
"--description", "hello tool",
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
_, got, err := invokeCommandWithContext(ctx, args)
if err != nil {
t.Fatalf("command failed: %v\nOutput: %s", err, got)
}
// Verify generated directory structure
skillPath := filepath.Join(outputDir, "hello-sqlite")
if _, err := os.Stat(skillPath); os.IsNotExist(err) {
t.Fatalf("skill directory not created: %s", skillPath)
}
// Check SKILL.md
skillMarkdown := filepath.Join(skillPath, "SKILL.md")
content, err := os.ReadFile(skillMarkdown)
if err != nil {
t.Fatalf("failed to read SKILL.md: %v", err)
}
expectedFrontmatter := `---
name: hello-sqlite
description: hello tool
---`
if !strings.HasPrefix(string(content), expectedFrontmatter) {
t.Errorf("SKILL.md does not have expected frontmatter format.\nExpected prefix:\n%s\nGot:\n%s", expectedFrontmatter, string(content))
}
if !strings.Contains(string(content), "## Usage") {
t.Errorf("SKILL.md does not contain '## Usage' section")
}
if !strings.Contains(string(content), "## Scripts") {
t.Errorf("SKILL.md does not contain '## Scripts' section")
}
if !strings.Contains(string(content), "### hello-sqlite") {
t.Errorf("SKILL.md does not contain '### hello-sqlite' tool header")
}
// Check script file
scriptFilename := "hello-sqlite.js"
scriptPath := filepath.Join(skillPath, "scripts", scriptFilename)
if _, err := os.Stat(scriptPath); os.IsNotExist(err) {
t.Fatalf("script file not created: %s", scriptPath)
}
scriptContent, err := os.ReadFile(scriptPath)
if err != nil {
t.Fatalf("failed to read script file: %v", err)
}
if !strings.Contains(string(scriptContent), "hello-sqlite") {
t.Errorf("script file does not contain expected tool name")
}
// Check assets
assetPath := filepath.Join(skillPath, "assets", "hello-sqlite.yaml")
if _, err := os.Stat(assetPath); os.IsNotExist(err) {
t.Fatalf("asset file not created: %s", assetPath)
}
assetContent, err := os.ReadFile(assetPath)
if err != nil {
t.Fatalf("failed to read asset file: %v", err)
}
if !strings.Contains(string(assetContent), "hello-sqlite") {
t.Errorf("asset file does not contain expected tool name")
}
}
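// For reference, the directory layout the assertions above imply (file names
// taken from the test inputs):
//
//	skills/hello-sqlite/
//	├── SKILL.md                  // frontmatter (name, description) + Usage/Scripts sections
//	├── scripts/hello-sqlite.js
//	└── assets/hello-sqlite.yaml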
func TestGenerateSkill_NoConfig(t *testing.T) {
tmpDir := t.TempDir()
outputDir := filepath.Join(tmpDir, "skills")
args := []string{
"skills-generate",
"--output-dir", outputDir,
"--name", "test",
"--description", "test",
}
_, _, err := invokeCommandWithContext(context.Background(), args)
if err == nil {
t.Fatal("expected command to fail when no configuration is provided and tools.yaml is missing")
}
// Should not have created the directory if no config was processed
if _, err := os.Stat(outputDir); !os.IsNotExist(err) {
t.Errorf("output directory should not have been created")
}
}
func TestGenerateSkill_MissingArguments(t *testing.T) {
tmpDir := t.TempDir()
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte("tools: {}"), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
tests := []struct {
name string
args []string
}{
{
name: "missing name",
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--description", "test"},
},
{
name: "missing description",
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--name", "test"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, got, err := invokeCommandWithContext(context.Background(), tt.args)
if err == nil {
t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got)
}
})
}
}

cmd/test.db Normal file
View File

View File

@@ -1 +1 @@
0.26.0
0.25.0

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -27,13 +27,6 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```.
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -24,13 +24,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -20,13 +20,6 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt looker```.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -12,17 +12,10 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
## Install & Configuration
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest```.
3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.26.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",
@@ -520,7 +520,8 @@
},
"outputs": [],
"source": [
"! pip install google-adk[toolbox] --quiet"
"! pip install toolbox-core --quiet\n",
"! pip install google-adk --quiet"
]
},
{
@@ -535,18 +536,14 @@
"from google.adk.runners import Runner\n",
"from google.adk.sessions import InMemorySessionService\n",
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
"from google.adk.tools.toolbox_toolset import ToolboxToolset\n",
"from google.genai import types\n",
"from toolbox_core import ToolboxSyncClient\n",
"\n",
"import os\n",
"# TODO(developer): replace this with your Google API key\n",
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
"\n",
"# Configure toolset\n",
"toolset = ToolboxToolset(\n",
" server_url=\"http://127.0.0.1:5000\",\n",
" toolset_name=\"my-toolset\"\n",
")\n",
"toolbox_client = ToolboxSyncClient(\"http://127.0.0.1:5000\")\n",
"\n",
"prompt = \"\"\"\n",
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
@@ -563,7 +560,7 @@
" name='hotel_agent',\n",
" description='A helpful AI assistant.',\n",
" instruction=prompt,\n",
" tools=[toolset],\n",
" tools=toolbox_client.load_toolset(\"my-toolset\"),\n",
")\n",
"\n",
"session_service = InMemorySessionService()\n",

View File

@@ -16,12 +16,6 @@ Databases” as its initial development predated MCP, but was renamed to align
with recently added MCP compatibility.
{{< /notice >}}
{{< notice note >}}
This document has been updated to support the configuration file v2 format. To
view documentation with configuration file v1 format, please navigate to the
top-right menu and select versions v0.26.0 or older.
{{< /notice >}}
## Why Toolbox?
Toolbox helps you build Gen AI tools that let your agents access data in your
@@ -77,7 +71,7 @@ redeploying your application.
## Getting Started
### Quickstart: Running Toolbox using NPX
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](../configure.md):
@@ -109,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):
```sh
# see releases page for other versions
export VERSION=0.26.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -120,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
```sh
# see releases page for other versions
export VERSION=0.26.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```
@@ -131,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):
```sh
# see releases page for other versions
export VERSION=0.26.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```
@@ -142,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
```cmd
:: see releases page for other versions
set VERSION=0.26.0
set VERSION=0.25.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```
@@ -152,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):
```powershell
# see releases page for other versions
$VERSION = "0.26.0"
$VERSION = "0.25.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```
@@ -164,7 +158,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.26.0
export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -183,7 +177,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.26.0
go install github.com/googleapis/genai-toolbox@v0.25.0
```
{{% /tab %}}

View File

@@ -52,7 +52,7 @@ runtime](https://research.google.com/colaboratory/local-runtimes.html).
{{< tabpane persist=header >}}
{{< tab header="ADK" lang="bash" >}}
pip install google-adk[toolbox]
pip install toolbox-core
{{< /tab >}}
{{< tab header="Langchain" lang="bash" >}}
@@ -73,7 +73,7 @@ pip install toolbox-core
{{< tabpane persist=header >}}
{{< tab header="ADK" lang="bash" >}}
# No other dependencies required for ADK
pip install google-adk
{{< /tab >}}
{{< tab header="Langchain" lang="bash" >}}
@@ -115,7 +115,7 @@ pip install google-genai
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
```py
{{< regionInclude "quickstart/python/adk/quickstart.py" "quickstart" >}}
{{< include "quickstart/python/adk/quickstart.py" >}}
```
<br/>

View File

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -18,7 +18,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"arrify": "^2.0.0",
"extend": "^3.0.2"
@@ -32,7 +31,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
"license": "Apache-2.0",
"peer": true,
"engines": {
"node": ">=14.0.0"
}
@@ -42,17 +40,15 @@
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
"license": "Apache-2.0",
"peer": true,
"engines": {
"node": ">=14"
}
},
"node_modules/@google-cloud/storage": {
"version": "7.19.0",
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.19.0.tgz",
"integrity": "sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ==",
"version": "7.18.0",
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"@google-cloud/paginator": "^5.0.0",
"@google-cloud/projectify": "^4.0.0",
@@ -60,7 +56,7 @@
"abort-controller": "^3.0.0",
"async-retry": "^1.3.3",
"duplexify": "^4.1.3",
"fast-xml-parser": "^5.3.4",
"fast-xml-parser": "^4.4.1",
"gaxios": "^6.0.2",
"google-auth-library": "^9.6.3",
"html-entities": "^2.5.2",
@@ -79,7 +75,6 @@
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"license": "MIT",
"peer": true,
"bin": {
"uuid": "dist/bin/uuid"
}
@@ -102,6 +97,7 @@
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"google-auth-library": "^9.14.2",
"ws": "^8.18.0"
@@ -140,6 +136,7 @@
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
"license": "MIT",
"peer": true,
"dependencies": {
"ajv": "^6.12.6",
"content-type": "^1.0.5",
@@ -302,7 +299,6 @@
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 10"
}
@@ -311,15 +307,13 @@
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/@types/node": {
"version": "24.10.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~7.16.0"
}
@@ -329,7 +323,6 @@
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/caseless": "*",
"@types/node": "*",
@@ -342,7 +335,6 @@
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"license": "MIT",
"peer": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
@@ -360,7 +352,6 @@
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 0.6"
}
@@ -370,7 +361,6 @@
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"peer": true,
"dependencies": {
"mime-db": "1.52.0"
},
@@ -382,15 +372,13 @@
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
"license": "MIT",
"peer": true,
"dependencies": {
"event-target-shim": "^5.0.0"
},
@@ -465,7 +453,6 @@
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=8"
}
@@ -475,7 +462,6 @@
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
"license": "MIT",
"peer": true,
"dependencies": {
"retry": "0.13.1"
}
@@ -768,7 +754,6 @@
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
"license": "MIT",
"peer": true,
"dependencies": {
"end-of-stream": "^1.4.1",
"inherits": "^2.0.3",
@@ -817,7 +802,6 @@
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
"license": "MIT",
"peer": true,
"dependencies": {
"once": "^1.4.0"
}
@@ -887,7 +871,6 @@
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=6"
}
@@ -918,6 +901,7 @@
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
"license": "MIT",
"peer": true,
"dependencies": {
"accepts": "^2.0.0",
"body-parser": "^2.2.0",
@@ -989,9 +973,9 @@
"license": "MIT"
},
"node_modules/fast-xml-parser": {
"version": "5.3.5",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.5.tgz",
"integrity": "sha512-JeaA2Vm9ffQKp9VjvfzObuMCjUYAp5WDYhRYL5LrBPY/jUDlUtOvDfot0vKSkB9tuX885BDHjtw4fZadD95wnA==",
"version": "4.5.3",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
"integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
"funding": [
{
"type": "github",
@@ -999,9 +983,8 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"strnum": "^2.1.2"
"strnum": "^1.1.1"
},
"bin": {
"fxparser": "src/cli/cli.js"
@@ -1350,8 +1333,7 @@
"url": "https://patreon.com/mdevils"
}
],
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/http-errors": {
"version": "2.0.0",
@@ -1383,7 +1365,6 @@
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@tootallnate/once": "2",
"agent-base": "6",
@@ -1398,7 +1379,6 @@
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"debug": "4"
},
@@ -1575,7 +1555,6 @@
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
"license": "MIT",
"peer": true,
"bin": {
"mime": "cli.js"
},
@@ -1736,7 +1715,6 @@
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"yocto-queue": "^0.1.0"
},
@@ -1878,7 +1856,6 @@
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"license": "MIT",
"peer": true,
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
@@ -1893,7 +1870,6 @@
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 4"
}
@@ -1903,7 +1879,6 @@
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/request": "^2.48.8",
"extend": "^3.0.2",
@@ -2132,7 +2107,6 @@
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
"license": "MIT",
"peer": true,
"dependencies": {
"stubs": "^3.0.0"
}
@@ -2141,15 +2115,13 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"license": "MIT",
"peer": true,
"dependencies": {
"safe-buffer": "~5.2.0"
}
@@ -2251,31 +2223,28 @@
}
},
"node_modules/strnum": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz",
"integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==",
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
"integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/teeny-request": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
@@ -2292,7 +2261,6 @@
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"debug": "4"
},
@@ -2305,7 +2273,6 @@
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
"license": "MIT",
"peer": true,
"dependencies": {
"agent-base": "6",
"debug": "4"
@@ -2347,8 +2314,7 @@
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/unpipe": {
"version": "1.0.0",
@@ -2372,8 +2338,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/uuid": {
"version": "9.0.1",
@@ -2560,7 +2525,6 @@
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -2573,6 +2537,7 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}


@@ -24,13 +24,12 @@
}
},
"node_modules/@dabh/diagnostics": {
"version": "2.0.8",
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz",
"integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==",
"license": "MIT",
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz",
"integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==",
"optional": true,
"dependencies": {
"@so-ric/colorspace": "^1.1.6",
"colorspace": "1.1.x",
"enabled": "2.0.x",
"kuler": "^2.0.0"
}
@@ -579,10 +578,9 @@
}
},
"node_modules/@google-cloud/firestore": {
"version": "7.11.6",
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz",
"integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==",
"license": "Apache-2.0",
"version": "7.11.3",
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.3.tgz",
"integrity": "sha512-qsM3/WHpawF07SRVvEJJVRwhYzM7o9qtuksyuqnrMig6fxIrwWnsezECWsG/D5TyYru51Fv5c/RTqNDQ2yU+4w==",
"optional": true,
"peer": true,
"dependencies": {
@@ -2889,17 +2887,6 @@
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
},
"node_modules/@so-ric/colorspace": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz",
"integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==",
"license": "MIT",
"optional": true,
"dependencies": {
"color": "^5.0.2",
"text-hex": "1.0.x"
}
},
"node_modules/@toolbox-sdk/core": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
@@ -3351,13 +3338,13 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.13.5",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
"integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.11",
"form-data": "^4.0.5",
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -3528,53 +3515,38 @@
}
},
"node_modules/color": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz",
"integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==",
"license": "MIT",
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
"integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
"optional": true,
"dependencies": {
"color-convert": "^3.1.3",
"color-string": "^2.1.3"
},
"engines": {
"node": ">=18"
"color-convert": "^1.9.3",
"color-string": "^1.6.0"
}
},
"node_modules/color-convert": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz",
"integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==",
"license": "MIT",
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
"optional": true,
"dependencies": {
"color-name": "^2.0.0"
},
"engines": {
"node": ">=14.6"
"color-name": "1.1.3"
}
},
"node_modules/color-name": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
"license": "MIT",
"optional": true,
"engines": {
"node": ">=12.20"
}
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
"optional": true
},
"node_modules/color-string": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz",
"integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==",
"license": "MIT",
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
"optional": true,
"dependencies": {
"color-name": "^2.0.0"
},
"engines": {
"node": ">=18"
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
}
},
"node_modules/colorette": {
@@ -3582,6 +3554,16 @@
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
},
"node_modules/colorspace": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz",
"integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==",
"optional": true,
"dependencies": {
"color": "^3.1.3",
"text-hex": "1.0.x"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -4248,10 +4230,9 @@
}
},
"node_modules/form-data": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
"license": "MIT",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
@@ -4987,6 +4968,12 @@
"node": ">= 0.10"
}
},
"node_modules/is-arrayish": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
"optional": true
},
"node_modules/is-core-module": {
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
@@ -5127,14 +5114,13 @@
}
},
"node_modules/jsonwebtoken/node_modules/jws": {
"version": "3.2.3",
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz",
"integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==",
"license": "MIT",
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz",
"integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==",
"optional": true,
"peer": true,
"dependencies": {
"jwa": "^1.4.2",
"jwa": "^1.4.1",
"safe-buffer": "^5.0.1"
}
},
@@ -5167,12 +5153,11 @@
}
},
"node_modules/jws": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"dependencies": {
"jwa": "^2.0.1",
"jwa": "^2.0.0",
"safe-buffer": "^5.0.1"
}
},
@@ -5439,10 +5424,9 @@
}
},
"node_modules/node-forge": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz",
"integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==",
"license": "(BSD-3-Clause OR GPL-2.0)",
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
"optional": true,
"peer": true,
"engines": {
@@ -6054,6 +6038,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/simple-swizzle": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
"optional": true,
"dependencies": {
"is-arrayish": "^0.3.1"
}
},
"node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
@@ -6240,7 +6233,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
"license": "MIT",
"optional": true
},
"node_modules/thriftrw": {
@@ -6424,14 +6416,13 @@
}
},
"node_modules/winston": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz",
"integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==",
"license": "MIT",
"version": "3.17.0",
"resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz",
"integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==",
"optional": true,
"dependencies": {
"@colors/colors": "^1.6.0",
"@dabh/diagnostics": "^2.0.8",
"@dabh/diagnostics": "^2.0.2",
"async": "^3.2.3",
"is-stream": "^2.0.0",
"logform": "^2.7.0",


@@ -53,7 +53,7 @@ export async function main() {
  for (const query of queries) {
    conversationHistory.push({ role: "user", content: [{ text: query }] });
    let response = await ai.generate({
    const response = await ai.generate({
      messages: conversationHistory,
      tools: tools,
    });


@@ -18,8 +18,7 @@
"node_modules/@cfworker/json-schema": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
"peer": true
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
},
"node_modules/@google/generative-ai": {
"version": "0.24.1",
@@ -226,7 +225,6 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -310,7 +308,6 @@
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -423,7 +420,6 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -825,7 +821,6 @@
"version": "1.0.21",
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
"peer": true,
"dependencies": {
"base64-js": "^1.5.1"
}
@@ -878,9 +873,9 @@
}
},
"node_modules/langsmith": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.5.2.tgz",
"integrity": "sha512-CfkcQsiajtTWknAcyItvJsKEQdY2VgDpm6U8pRI9wnM07mevnOv5EF+RcqWGwx37SEUxtyi2RXMwnKW8b06JtA==",
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
"license": "MIT",
"dependencies": {
"@types/uuid": "^10.0.0",
@@ -974,7 +969,6 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
"peer": true,
"bin": {
"mustache": "bin/mustache"
}
@@ -1413,6 +1407,7 @@
"version": "3.25.76",
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}


@@ -975,10 +975,9 @@
}
},
"node_modules/lodash": {
"version": "4.17.23",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
"license": "MIT"
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/magic-bytes.js": {
"version": "1.12.1",


@@ -1,57 +1,15 @@
# [START quickstart]
import asyncio

from google.adk import Agent
from google.adk.apps import App
from google.adk.runners import InMemoryRunner
from google.adk.tools.toolbox_toolset import ToolboxToolset
from google.genai.types import Content, Part

prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention it's name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""

from toolbox_core import ToolboxSyncClient

# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
toolset = ToolboxToolset(
    server_url="http://127.0.0.1:5000",
)
client = ToolboxSyncClient("http://127.0.0.1:5000")

root_agent = Agent(
    name='hotel_assistant',
    name='root_agent',
    model='gemini-2.5-flash',
    instruction=prompt,
    tools=[toolset],
    instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
    tools=client.load_toolset(),
)
app = App(root_agent=root_agent, name="my_agent")
# [END quickstart]

queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
]

async def main():
    runner = InMemoryRunner(app=app)
    session = await runner.session_service.create_session(
        app_name=app.name, user_id="test_user"
    )
    for query in queries:
        print(f"\nUser: {query}")
        user_message = Content(parts=[Part.from_text(text=query)])
        async for event in runner.run_async(user_id="test_user", session_id=session.id, new_message=user_message):
            if event.is_final_response() and event.content and event.content.parts:
                print(f"Agent: {event.content.parts[0].text}")

if __name__ == "__main__":
    asyncio.run(main())


@@ -1,2 +1,3 @@
google-adk[toolbox]==1.23.0
google-adk==1.21.0
toolbox-core==0.5.4
pytest==9.0.2


@@ -41,29 +41,31 @@ def golden_keywords():
class TestExecution:
    """Test framework execution and output validation."""

    _cached_output = None

    @pytest.fixture(scope="function")
    def script_output(self, capsys):
        """Run the quickstart function and return its output."""
        if TestExecution._cached_output is None:
            # TODO: Add better validation for ADK once we have a way to capture its
            # output.
            if ORCH_NAME == "adk":
                return quickstart.app.root_agent.name
            else:
                asyncio.run(quickstart.main())
            out, err = capsys.readouterr()
            TestExecution._cached_output = (out, err)

        class Output:
            def __init__(self, out, err):
                self.out = out
                self.err = err

        return Output(*TestExecution._cached_output)
        return capsys.readouterr()

    def test_script_runs_without_errors(self, script_output):
        """Test that the script runs and produces no stderr."""
        if ORCH_NAME == "adk":
            return
        assert script_output.err == "", f"Script produced stderr: {script_output.err}"

    def test_keywords_in_output(self, script_output, golden_keywords):
        """Test that expected keywords are present in the script's output."""
        if ORCH_NAME == "adk":
            assert script_output == "root_agent"
            return
        output = script_output.out
        missing_keywords = [kw for kw in golden_keywords if kw not in output]
        assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"


@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->


@@ -54,7 +54,6 @@ instance, database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
## Install MCP Toolbox
@@ -302,7 +301,6 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs


@@ -54,7 +54,6 @@ database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
## Install MCP Toolbox
@@ -302,7 +301,6 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs


@@ -54,7 +54,6 @@ instance, database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
## Install MCP Toolbox
@@ -302,7 +301,6 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs


@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -32,7 +32,7 @@ to expose your developer assistant tools to a Postgres instance:
{{< notice tip >}}
This guide can be used with [AlloyDB
Omni](https://cloud.google.com/alloydb/omni/docs/overview).
Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
{{< /notice >}}
## Set up the database
@@ -40,10 +40,10 @@ Omni](https://cloud.google.com/alloydb/omni/docs/overview).
1. Create or select a PostgreSQL instance.
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart)
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
1. Create or reuse [a database
user](https://docs.cloud.google.com/alloydb/omni/containers/current/docs/database-users/manage-users)
user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
and have the username and password ready.
## Install MCP Toolbox
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->


@@ -20,7 +20,6 @@ The native SDKs can be combined with MCP clients in many cases.
Toolbox currently supports the following versions of MCP specification:
* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)


@@ -46,10 +46,10 @@ with the necessary configuration for deployment to Vertex AI Agent Engine.
process will generate deployment configuration files (like a `Makefile` and
`Dockerfile`) in your project directory.
4. Add `google-adk[toolbox]` as a dependency to the new project:
4. Add `toolbox-core` as a dependency to the new project:
```bash
uv add google-adk[toolbox]
uv add toolbox-core
```
## Step 3: Configure Google Cloud Authentication
@@ -83,32 +83,34 @@ Toolbox instead of the local address.
2. Open your agent file (`my_agent/agent.py`).
3. Update the `ToolboxToolset` initialization to point to your Cloud Run service URL. Replace the existing initialization code with the following:
3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
{{% alert color="info" title="Note" %}}
Since Cloud Run services are secured by default, you also need to provide a workload identity.
{{% alert color="info" %}}
Since Cloud Run services are secured by default, you also need to provide an
authentication token.
{{% /alert %}}
Replace your existing client initialization code with the following:
```python
from google.adk import Agent
from google.adk.apps import App
from google.adk.tools.toolbox_toolset import ToolboxToolset
from toolbox_adk import CredentialStrategy
from toolbox_core import ToolboxSyncClient, auth_methods
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
# Initialize the toolset with Workload Identity (generates ID token for the URL)
toolset = ToolboxToolset(
    server_url=TOOLBOX_URL,
    credentials=CredentialStrategy.workload_identity(target_audience=TOOLBOX_URL)
# Initialize the client with the Cloud Run URL and Auth headers
client = ToolboxSyncClient(
    TOOLBOX_URL,
    client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
)

root_agent = Agent(
    name='root_agent',
    model='gemini-2.5-flash',
    instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
    tools=[toolset],
    tools=client.load_toolset(),
)
app = App(root_agent=root_agent, name="my_agent")
@@ -129,14 +131,14 @@ app = App(root_agent=root_agent, name="my_agent")
Run the deployment command:
```bash
make deploy
make backend
```
This command will build your agent's container image and deploy it to Vertex AI.
## Step 6: Test your Deployment
Once the deployment command (`make deploy`) completes, it will output the URL
Once the deployment command (`make backend`) completes, it will output the URL
for the Agent Engine Playground. You can click on this URL to open the
Playground in your browser and start chatting with your agent to test the tools.


@@ -207,7 +207,6 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
{{< tab header="Python" lang="python" >}}
import asyncio
from toolbox_core import ToolboxClient, auth_methods
from toolbox_core.protocol import Protocol
# Replace with the Cloud Run service URL generated in the previous step
URL = "https://cloud-run-url.app"
@@ -218,7 +217,6 @@ async def main():
    async with ToolboxClient(
        URL,
        client_headers={"Authorization": auth_token_provider},
        protocol=Protocol.TOOLBOX,
    ) as toolbox:
        toolset = await toolbox.load_toolset()
        # ...
@@ -283,5 +281,3 @@ contain the specific error message needed to diagnose the problem.
Manager, it means the Toolbox service account is missing permissions.
- Ensure the `toolbox-identity` service account has the **Secret Manager
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.


@@ -1,112 +0,0 @@
---
title: "Generate Agent Skills"
type: docs
weight: 10
description: >
How to generate agent skills from a toolset.
---
The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset, complying with the [Agent Skill specification](https://agentskills.io/specification).
## Before you begin
1. Make sure you have the `toolbox` executable in your PATH.
2. Make sure you have [Node.js](https://nodejs.org/) installed on your system.
## Generating a Skill from a Toolset
A skill package consists of a `SKILL.md` file (with required YAML frontmatter) and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding Node.js script (`.js`) that works across platforms (Linux, macOS, Windows).
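For orientation, the YAML frontmatter of a generated `SKILL.md` might look like the sketch below. The Agent Skill specification requires `name` and `description`; any other fields emitted by `skills-generate` depend on the toolbox version, and the values here are illustrative.
```yaml
---
name: my-skill
description: A skill containing multiple tools
---
```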
### Command Usage
The basic syntax for the command is:
```bash
toolbox <tool-source> skills-generate \
--name <skill-name> \
--toolset <toolset-name> \
--description <description> \
--output-dir <output-directory>
```
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, or `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
- `--name`: Name of the generated skill.
- `--description`: Description of the generated skill.
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
{{< notice note >}}
**Note:** The `<skill-name>` must follow the Agent Skill [naming convention](https://agentskills.io/specification): it must contain only lowercase alphanumeric characters and hyphens, cannot start or end with a hyphen, and cannot contain consecutive hyphens (e.g., `my-skill`, `data-processing`).
{{< /notice >}}
### Example: Custom Tools File
1. Create a `tools.yaml` file with a toolset and some tools:
```yaml
tools:
  tool_a:
    description: "First tool"
    run:
      command: "echo 'Tool A'"
  tool_b:
    description: "Second tool"
    run:
      command: "echo 'Tool B'"

toolsets:
  my_toolset:
    tools:
      - tool_a
      - tool_b
```
2. Generate the skill:
```bash
toolbox --tools-file tools.yaml skills-generate \
--name "my-skill" \
--toolset "my_toolset" \
--description "A skill containing multiple tools" \
--output-dir "generated-skills"
```
3. The generated skill directory structure:
```text
generated-skills/
└── my-skill/
    ├── SKILL.md
    ├── assets/
    │   ├── tool_a.yaml
    │   └── tool_b.yaml
    └── scripts/
        ├── tool_a.js
        └── tool_b.js
```
In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset.
### Example: Prebuilt Configuration
You can also generate skills from prebuilt toolsets:
```bash
toolbox --prebuilt alloydb-postgres-admin skills-generate \
--name "alloydb-postgres-admin" \
--description "skill for performing administrative operations on alloydb"
```
## Installing the Generated Skill in Gemini CLI
Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command.
### Installation Command
Provide the path to the directory containing the generated skill:
```bash
gemini skills install /path/to/generated-skills/my-skill
```
Alternatively, use `~/.gemini/skills` as the `--output-dir` to generate the skill directly into the Gemini CLI's skills directory.


@@ -1,75 +0,0 @@
---
title: "Invoke Tools via CLI"
type: docs
weight: 10
description: >
Learn how to invoke your tools directly from the command line using the `invoke` command.
---
The `invoke` command lets you run tools defined in your configuration directly from the CLI. This is useful for:
- **Ephemeral Invocation:** Executing a tool without spinning up a full MCP server/client.
- **Debugging:** Isolating tool execution logic and testing with various parameter combinations.
{{< notice tip >}}
**Keep configurations minimal:** The `invoke` command initializes *all* resources (sources, tools, etc.) defined in your configuration files during execution. To ensure fast response times, consider using a minimal configuration file containing only the tools you need for the specific invocation.
{{< /notice >}}
## Before you begin
1. Make sure you have the `toolbox` binary installed or built.
2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`).
### Command Usage
The basic syntax for the command is:
```bash
toolbox <tool-source> invoke <tool-name> [params]
```
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, or `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`.
- `[params]`: (Optional) A JSON string representing the arguments for the tool.
## Examples
### 1. Calling a Tool without Parameters
If your tool takes no parameters, simply provide the tool name:
```bash
toolbox --tools-file tools.yaml invoke my-simple-tool
```
### 2. Calling a Tool with Parameters
For tools that require arguments, pass them as a JSON string. Ensure you escape quotes correctly for your shell.
**Example: A tool that takes parameters**
Assuming a tool named `mytool` taking `a` and `b`:
```bash
toolbox --tools-file tools.yaml invoke mytool '{"a": 10, "b": 20}'
```
**Example: A tool that queries a database**
```bash
toolbox --tools-file tools.yaml invoke db-query '{"sql": "SELECT * FROM users LIMIT 5"}'
```
### 3. Using Prebuilt Configurations
You can also use the `--prebuilt` flag to load prebuilt toolsets.
```bash
toolbox --prebuilt cloudsql-postgres invoke cloudsql-postgres-list-instances
```
## Troubleshooting
- **Tool not found:** Ensure the `<tool-name>` matches exactly what is in your YAML file and that the file is correctly loaded via `--tools-file`.
- **Invalid parameters:** Double-check your JSON syntax. The error message will usually indicate if the JSON parsing failed or if the parameters didn't match the tool's schema.
- **Auth errors:** The `invoke` command currently does not support flows requiring client-side authorization (like OAuth flow initiation via the CLI). It works best for tools using service-side authentication (e.g., Application Default Credentials).


@@ -16,7 +16,7 @@ description: >
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use one or more prebuilt tool configurations by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
@@ -27,53 +27,8 @@ description: >
| | `--ui` | Launches the Toolbox UI web server. | |
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
| | `--user-agent-metadata` | Appends additional metadata to the User-Agent. | |
| `-v` | `--version` | Version for toolbox. | |
## Sub Commands
<details>
<summary><code>invoke</code></summary>
Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.
**Syntax:**
```bash
toolbox invoke <tool-name> [params]
```
**Arguments:**
- `tool-name`: The name of the tool to execute (as defined in your configuration).
- `params`: (Optional) A JSON string containing the parameters for the tool.
For more detailed instructions, see [Invoke Tools via CLI](../how-to/invoke_tool.md).
</details>
<details>
<summary><code>skills-generate</code></summary>
Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill.
**Syntax:**
```bash
toolbox skills-generate --name <name> --description <description> --toolset <toolset> --output-dir <output>
```
**Flags:**
- `--name`: Name of the generated skill.
- `--description`: Description of the generated skill.
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
For more detailed instructions, see [Generate Agent Skills](../how-to/generate_skill.md).
</details>
## Examples
### Transport Configuration
@@ -95,11 +50,6 @@ For more detailed instructions, see [Generate Agent Skills](../how-to/generate_s
# Server with prebuilt + custom tools configurations
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
# Server with multiple prebuilt tools configurations
./toolbox --prebuilt alloydb-postgres,alloydb-postgres-admin
# OR
./toolbox --prebuilt alloydb-postgres --prebuilt alloydb-postgres-admin
```
### Tool Configuration Sources
@@ -120,7 +70,7 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
**Prebuilt Configurations:**
- `--prebuilt`: Use one or more predefined configurations for specific database types (e.g.,
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
Reference](prebuilt-tools.md) for allowed values.


@@ -16,9 +16,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
{{< notice tip >}}
You can now use `--prebuilt` along with `--tools-file`, `--tools-files`, or
`--tools-folder` to combine prebuilt configs with custom tools.
You can also combine multiple prebuilt configs.
See [Usage Examples](../reference/cli.md#examples).
{{< /notice >}}
@@ -54,9 +51,9 @@ See [Usage Examples](../reference/cli.md#examples).
* `get_query_plan`: Generate the execution plan of a statement.
* `list_views`: Lists views in the database from pg_views with a default
limit of 50 rows. Returns schemaname, viewname and the ownername.
* `list_schemas`: Lists schemas in the database.
* `database_overview`: Fetches the current state of the PostgreSQL server.
* `list_triggers`: Lists triggers in the database.
* `list_schemas`: Lists schemas in the database.
* `database_overview`: Fetches the current state of the PostgreSQL server.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
@@ -64,7 +61,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
## AlloyDB Postgres Admin
@@ -100,43 +97,6 @@ See [Usage Examples](../reference/cli.md#examples).
(timeseries metrics) for queries running in an AlloyDB instance using a
PromQL query.
## AlloyDB Omni
* `--prebuilt` value: `alloydb-omni`
* **Environment Variables:**
  * `ALLOYDB_OMNI_HOST`: (Optional) The hostname or IP address (Default: localhost).
  * `ALLOYDB_OMNI_PORT`: (Optional) The port number (Default: 5432).
  * `ALLOYDB_OMNI_DATABASE`: The name of the database to connect to.
  * `ALLOYDB_OMNI_USER`: The database username.
  * `ALLOYDB_OMNI_PASSWORD`: (Optional) The password for the database user.
  * `ALLOYDB_OMNI_QUERY_PARAMS`: (Optional) Connection query parameters.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.
  * `list_autovacuum_configurations`: Lists autovacuum configurations in the
    database.
  * `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations.
  * `list_columnar_recommended_columns`: Lists columns that AlloyDB Omni recommends adding to the columnar engine.
  * `list_memory_configurations`: Lists memory-related configurations in the
    database.
  * `list_top_bloated_tables`: List top bloated tables in the database.
  * `list_replication_slots`: Lists replication slots in the database.
  * `list_invalid_indexes`: Lists invalid indexes in the database.
  * `get_query_plan`: Generate the execution plan of a statement.
  * `list_views`: Lists views in the database from pg_views with a default
    limit of 50 rows. Returns schemaname, viewname and the ownername.
  * `list_schemas`: Lists schemas in the database.
  * `database_overview`: Fetches the current state of the PostgreSQL server.
  * `list_triggers`: Lists triggers in the database.
  * `list_indexes`: List available user indexes in a PostgreSQL database.
  * `list_sequences`: List sequences in a PostgreSQL database.
  * `list_publication_tables`: List publication tables in a PostgreSQL database.
  * `list_tablespaces`: Lists tablespaces in the database.
  * `list_pg_settings`: List configuration parameters for the PostgreSQL server.
  * `list_database_stats`: Lists the key performance and activity statistics for
    each database in the AlloyDB instance.
  * `list_roles`: Lists all the user-created roles in PostgreSQL database.
## BigQuery
* `--prebuilt` value: `bigquery`
@@ -234,7 +194,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
@@ -246,7 +205,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.
## Cloud SQL for PostgreSQL
@@ -280,9 +238,9 @@ See [Usage Examples](../reference/cli.md#examples).
* `get_query_plan`: Generate the execution plan of a statement.
* `list_views`: Lists views in the database from pg_views with a default
limit of 50 rows. Returns schemaname, viewname and the ownername.
* `list_schemas`: Lists schemas in the database.
* `database_overview`: Fetches the current state of the PostgreSQL server.
* `list_triggers`: Lists triggers in the database.
* `list_schemas`: Lists schemas in the database.
* `database_overview`: Fetches the current state of the PostgreSQL server.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
@@ -290,7 +248,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL instance.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
## Cloud SQL for PostgreSQL Observability
@@ -326,7 +284,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
@@ -337,7 +294,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.
## Cloud SQL for SQL Server
@@ -391,7 +347,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.
@@ -402,7 +357,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.
## Dataplex
@@ -414,10 +368,10 @@ See [Usage Examples](../reference/cli.md#examples).
entries.
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
* **Tools:**
* `search_entries`: Searches for entries in Dataplex Catalog.
* `lookup_entry`: Retrieves a specific entry from Dataplex
* `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
* `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex
Catalog.
* `search_aspect_types`: Finds aspect types relevant to the
* `dataplex_search_aspect_types`: Finds aspect types relevant to the
query.
## Firestore
@@ -488,10 +442,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_project_file`: Create a new LookML file.
* `update_project_file`: Update an existing LookML file.
* `delete_project_file`: Delete a LookML file.
* `get_project_directories`: Retrieves a list of project directories for a given LookML project.
* `create_project_directory`: Creates a new directory within a specified LookML project.
* `delete_project_directory`: Deletes a directory from a specified LookML project.
* `validate_project`: Check the syntax of a LookML project.
* `get_connections`: Get the available connections in a Looker instance.
* `get_connection_schemas`: Get the available schemas in a connection.
* `get_connection_databases`: Get the available databases in a connection.
@@ -605,9 +555,9 @@ See [Usage Examples](../reference/cli.md#examples).
* `get_query_plan`: Generate the execution plan of a statement.
* `list_views`: Lists views in the database from pg_views with a default
limit of 50 rows. Returns the schemaname, viewname, and ownername.
* `list_schemas`: Lists schemas in the database.
* `database_overview`: Fetches the current state of the PostgreSQL server.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
@@ -615,7 +565,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL server.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.
## Google Cloud Serverless for Apache Spark

View File

@@ -3,14 +3,13 @@ title: "EmbeddingModels"
type: docs
weight: 2
description: >
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
---
EmbeddingModels represent services that generate vector representations of text
data. In the MCP Toolbox, these models enable **Semantic Queries**,
allowing [Tools](../tools/) to automatically convert human-readable text into
numerical vectors before using them in a query.
This is primarily used in two scenarios:
@@ -20,33 +19,14 @@ This is primarily used in two scenarios:
- **Semantic Search**: Converting a natural language query into a vector to
perform similarity searches.
## Hidden Parameter Duplication (valueFromParam)
When building tools for vector ingestion, you often need the same input string
twice:
1. To store the original text in a TEXT column.
1. To generate the vector embedding for a VECTOR column.
Requesting an Agent (LLM) to output the exact same string twice is inefficient
and error-prone. The `valueFromParam` field solves this by allowing a parameter
to inherit its value from another parameter in the same tool.
### Key Behaviors
1. Hidden from Manifest: Parameters with `valueFromParam` set are excluded from
the tool definition sent to the Agent. The Agent does not know this parameter
exists.
1. Auto-Filled: When the tool is executed, the Toolbox automatically copies the
value from the referenced parameter before processing embeddings.
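A minimal sketch of the pattern (parameter and model names here are
hypothetical, not from the configuration below):

```yaml
parameters:
  - name: content
    type: string
    description: The text the Agent supplies once.
  - name: content_vector
    type: string
    # Hidden from the Agent; the value is copied from 'content' at execution time.
    valueFromParam: content
    embeddedBy: my-embedding-model
```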
## Example
The following configuration defines an embedding model and applies it to
specific tool parameters.
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your API keys into the configuration file.
{{< /notice >}}
### Step 1 - Define an Embedding Model
@@ -55,7 +35,7 @@ Define an embedding model in the `embeddingModels` section:
```yaml
kind: embeddingModels
name: gemini-model # Name of the embedding model
name: gemini-model: # Name of the embedding model
type: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
@@ -65,7 +45,8 @@ dimension: 768
### Step 2 - Embed Tool Parameters
Using the defined embedding model, embed your query parameters with the
`embeddedBy` field. Only string-typed parameters can be embedded:
```yaml
# Vector ingestion tool
@@ -79,13 +60,10 @@ statement: |
parameters:
- name: content
type: string
description: The raw text content to be stored in the database.
- name: vector_string
type: string
# This parameter is hidden from the LLM.
# It automatically copies the value from 'content' and embeds it.
valueFromParam: content
embeddedBy: gemini-model
description: The text to be vectorized and stored.
embeddedBy: gemini-model # refers to the name of a defined embedding model
---
# Semantic search tool
kind: tools

View File

@@ -20,10 +20,10 @@ kind: prompts
name: code_review
description: "Asks the LLM to analyze code quality and suggest improvements."
messages:
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
arguments:
- name: "code"
description: "The code to review"
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
arguments:
- name: "code"
description: "The code to review"
```
## Prompt Schema

View File

@@ -27,7 +27,7 @@ Authentication can be handled in two ways:
```yaml
kind: sources
name: my-alloydb-admin
type: alloydb-admin
type: alloy-admin
---
kind: sources
name: my-oauth-alloydb-admin

View File

@@ -194,15 +194,6 @@ Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
### Managed Connection Pooling
Toolbox automatically supports [Managed Connection Pooling][alloydb-mcp]. If your AlloyDB instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [AlloyDB Managed Connection Pooling documentation][alloydb-mcp-docs].
[alloydb-mcp]: https://cloud.google.com/blog/products/databases/alloydb-managed-connection-pooling
[alloydb-mcp-docs]: https://cloud.google.com/alloydb/docs/configure-managed-connection-pooling
## Reference
| **field** | **type** | **required** | **description** |

View File

@@ -1,71 +0,0 @@
---
title: "Cloud Logging Admin"
type: docs
weight: 1
description: >
The Cloud Logging Admin source enables tools to interact with the Cloud Logging API, allowing for the retrieval of log names, monitored resource types, and the querying of log data.
---
## About
The Cloud Logging Admin source provides a client to interact with the [Google
Cloud Logging API](https://cloud.google.com/logging/docs). This allows tools to list log names and monitored resource types, and to query log entries.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
expect an OAuth 2.0 access token to be provided by the client (e.g., a web
browser) for each request.
## Available Tools
- [`cloud-logging-admin-list-log-names`](../tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md)
Lists the log names in the project.
- [`cloud-logging-admin-list-resource-types`](../tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md)
Lists the monitored resource types.
- [`cloud-logging-admin-query-logs`](../tools/cloudloggingadmin/cloud-logging-admin-query-logs.md)
Queries log entries.
## Example
Initialize a Cloud Logging Admin source that uses ADC:
```yaml
kind: sources
name: my-cloud-logging
type: cloud-logging-admin
project: my-project-id
```
Initialize a Cloud Logging Admin source that uses client-side OAuth:
```yaml
kind: sources
name: my-oauth-cloud-logging
type: cloud-logging-admin
project: my-project-id
useClientOAuth: true
```
Initialize a Cloud Logging Admin source that uses service account impersonation:
```yaml
kind: sources
name: my-impersonated-cloud-logging
type: cloud-logging-admin
project: my-project-id
impersonateServiceAccount: "my-service-account@my-project.iam.gserviceaccount.com"
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin". |
| project | string | true | ID of the GCP project. |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. Cannot be used with `impersonateServiceAccount`. |
| impersonateServiceAccount | string | false | The service account to impersonate for API calls. Cannot be used with `useClientOAuth`. |

View File

@@ -195,15 +195,6 @@ Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
### Managed Connection Pooling
Toolbox automatically supports [Managed Connection Pooling][csql-mcp]. If your Cloud SQL for PostgreSQL instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [Cloud SQL Managed Connection Pooling documentation][csql-mcp-docs].
[csql-mcp]: https://docs.cloud.google.com/sql/docs/postgres/managed-connection-pooling
[csql-mcp-docs]: https://docs.cloud.google.com/sql/docs/postgres/configure-mcp
## Reference
| **field** | **type** | **required** | **description** |

View File

@@ -7,17 +7,6 @@ description: >
---
{{< notice note >}}
**⚠️ Best Effort Maintenance**
This integration is maintained on a best-effort basis by the project
team/community. While we strive to address issues and provide workarounds when
resources are available, there are no guaranteed response times or code fixes.
The automated integration tests for this module are currently non-functional or
failing.
{{< /notice >}}
## About
[Dgraph][dgraph-docs] is an open-source graph database. It is designed for

View File

@@ -12,9 +12,6 @@ aliases:
The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).
> [!NOTE]
> Only `alloydb`, `spannerReference`, and `cloudSqlReference` are supported as [datasource references](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1beta/projects.locations.dataAgents#DatasourceReferences).
## Example
```yaml
@@ -44,13 +41,13 @@ generationOptions:
### Usage Flow
When using this tool, a `query` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).
See [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.
**Example Input Query:**
**Example Input Prompt:**
```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.

View File

@@ -1,8 +0,0 @@
---
title: "Cloud Logging Admin"
linkTitle: "Cloud Logging Admin"
type: docs
weight: 1
description: >
Tools that work with Cloud Logging Admin Sources.
---

View File

@@ -1,39 +0,0 @@
---
title: "cloud-logging-admin-list-log-names"
type: docs
description: >
A "cloud-logging-admin-list-log-names" tool lists the log names in the project.
aliases:
- /resources/tools/cloud-logging-admin-list-log-names
---
## About
The `cloud-logging-admin-list-log-names` tool lists the log names available in the Google Cloud project.
It's compatible with the following sources:
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
## Example
```yaml
kind: tools
name: list_log_names
type: cloud-logging-admin-list-log-names
source: my-cloud-logging
description: Lists all log names in the project.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-list-log-names". |
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |
### Parameters
| **parameter** | **type** | **required** | **description** |
|:--------------|:--------:|:------------:|:----------------|
| limit | integer | false | Maximum number of log entries to return (default: 200). |

View File

@@ -1,34 +0,0 @@
---
title: "cloud-logging-admin-list-resource-types"
type: docs
description: >
A "cloud-logging-admin-list-resource-types" tool lists the monitored resource types.
aliases:
- /resources/tools/cloud-logging-admin-list-resource-types
---
## About
The `cloud-logging-admin-list-resource-types` tool lists the monitored resource types available in Google Cloud Logging.
It's compatible with the following sources:
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
## Example
```yaml
kind: tools
name: list_resource_types
type: cloud-logging-admin-list-resource-types
source: my-cloud-logging
description: Lists monitored resource types.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-list-resource-types".|
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "cloud-logging-admin-query-logs"
type: docs
description: >
A "cloud-logging-admin-query-logs" tool queries log entries.
aliases:
- /resources/tools/cloud-logging-admin-query-logs
---
## About
The `cloud-logging-admin-query-logs` tool allows you to query log entries from Google Cloud Logging using the advanced logs filter syntax.
It's compatible with the following sources:
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
## Example
```yaml
kind: tools
name: query_logs
type: cloud-logging-admin-query-logs
source: my-cloud-logging
description: Queries log entries from Cloud Logging.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-query-logs". |
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |
### Parameters
| **parameter** | **type** | **required** | **description** |
|:--------------|:--------:|:------------:|:----------------|
| filter | string | false | Cloud Logging filter query. Common fields: resource.type, resource.labels.*, logName, severity, textPayload, jsonPayload.*, protoPayload.*, labels.*, httpRequest.*. Operators: =, !=, <, <=, >, >=, :, =~, AND, OR, NOT. |
| newestFirst | boolean | false | Set to true for newest logs first. Defaults to oldest first. |
| startTime | string | false | Start time in RFC3339 format (e.g., 2025-12-09T00:00:00Z). Defaults to 30 days ago. |
| endTime | string | false | End time in RFC3339 format (e.g., 2025-12-09T23:59:59Z). Defaults to now. |
| verbose | boolean | false | Include additional fields (insertId, trace, spanId, httpRequest, labels, operation, sourceLocation). Defaults to false. |
| limit | integer | false | Maximum number of log entries to return. Default: `200`. |
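As a rough illustration, a call to this tool might supply parameters like the
following (the filter and time range are hypothetical):

```yaml
filter: 'resource.type="gce_instance" AND severity>=ERROR'  # advanced logs filter syntax
startTime: "2025-12-09T00:00:00Z"
endTime: "2025-12-09T23:59:59Z"
newestFirst: true
limit: 50
```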

View File

@@ -1,53 +0,0 @@
---
title: cloud-sql-restore-backup
type: docs
weight: 10
description: "Restores a backup of a Cloud SQL instance."
---
The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API.
{{< notice info >}}
This tool uses a `source` of type `cloud-sql-admin`.
{{< /notice >}}
## Examples
Basic backup restore
```yaml
kind: tools
name: backup-restore-basic
type: cloud-sql-restore-backup
source: cloud-sql-admin-source
description: "Restores a backup onto the given Cloud SQL instance."
```
## Reference
### Tool Configuration
| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------ |
| type | string | true | Must be "cloud-sql-restore-backup". |
| source | string | true | The name of the `cloud-sql-admin` source to use. |
| description | string | false | A description of the tool. |
### Tool Inputs
| **parameter** | **type** | **required** | **description** |
| ------------------| :------: | :----------: | -----------------------------------------------------------------------------|
| target_project | string | true | The project ID of the instance to restore the backup onto. |
| target_instance | string | true | The instance to restore the backup onto. Does not include the project ID. |
| backup_id | string | true | The identifier of the backup being restored. |
| source_project | string | false | (Optional) The project ID of the instance that the backup belongs to. |
| source_instance | string | false | (Optional) Cloud SQL instance ID of the instance that the backup belongs to. |
## Usage Notes
- The `backup_id` field can be a BackupRun ID (an int64), a backup name, or a BackupDR backup name.
- If the `backup_id` field contains a BackupRun ID (i.e. an int64), the optional fields `source_project` and `source_instance` must also be provided.
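For instance, a sketch of the tool inputs for the BackupRun-ID case (all
project and instance names are hypothetical):

```yaml
target_project: my-target-project
target_instance: my-target-instance
backup_id: "1234567890123456789"    # a BackupRun ID (int64)
source_project: my-source-project   # required when backup_id is a BackupRun ID
source_instance: my-source-instance # required when backup_id is a BackupRun ID
```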
## See Also
- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Restore API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/restoring)

View File

@@ -9,17 +9,6 @@ aliases:
- /resources/tools/dgraph-dql
---
{{< notice note >}}
**⚠️ Best Effort Maintenance**
This integration is maintained on a best-effort basis by the project
team/community. While we strive to address issues and provide workarounds when
resources are available, there are no guaranteed response times or code fixes.
The automated integration tests for this module are currently non-functional or
failing.
{{< /notice >}}
## About
A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph

View File

@@ -18,7 +18,7 @@ with filters, ordering, and limit capabilities.
To use this tool, you need to configure it in your YAML configuration file:
```yaml
kind: sources
kind: source
name: my-firestore
type: firestore
project: my-gcp-project

View File

@@ -1,44 +0,0 @@
---
title: "looker-create-project-directory"
type: docs
weight: 1
description: >
A "looker-create-project-directory" tool creates a new directory in a LookML project.
aliases:
- /resources/tools/looker-create-project-directory
---
## About
A `looker-create-project-directory` tool creates a new directory within a specified LookML project.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
## Example
```yaml
kind: tools
name: looker-create-project-directory
type: looker-create-project-directory
source: looker-source
description: |
This tool creates a new directory within a specific LookML project.
It is useful for organizing project files.
Parameters:
- project_id (string): The ID of the LookML project.
- path (string): The path of the directory to create.
Output:
A string confirming the creation of the directory.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "looker-create-project-directory". |
| source | string | true | Name of the source Looker instance. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "looker-delete-project-directory"
type: docs
weight: 1
description: >
A "looker-delete-project-directory" tool deletes a directory from a LookML project.
aliases:
- /resources/tools/looker-delete-project-directory
---
## About
A `looker-delete-project-directory` tool deletes a directory from a specified LookML project.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
## Example
```yaml
kind: tools
name: looker-delete-project-directory
type: looker-delete-project-directory
source: looker-source
description: |
This tool deletes a directory from a specific LookML project.
It is useful for removing unnecessary or obsolete directories.
Parameters:
- project_id (string): The ID of the LookML project.
- path (string): The path of the directory to delete.
Output:
A string confirming the deletion of the directory.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "looker-delete-project-directory". |
| source | string | true | Name of the source Looker instance. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,43 +0,0 @@
---
title: "looker-get-project-directories"
type: docs
weight: 1
description: >
A "looker-get-project-directories" tool returns the directories within a specific LookML project.
aliases:
- /resources/tools/looker-get-project-directories
---
## About
A `looker-get-project-directories` tool retrieves the directories within a specified LookML project.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
## Example
```yaml
kind: tools
name: looker-get-project-directories
type: looker-get-project-directories
source: looker-source
description: |
This tool retrieves a list of directories within a specific LookML project.
It is useful for exploring the project structure.
Parameters:
- project_id (string): The ID of the LookML project.
Output:
A JSON array of strings, representing the directories within the project.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "looker-get-project-directories". |
| source | string | true | Name of the source Looker instance. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,47 +0,0 @@
---
title: "looker-validate-project"
type: docs
weight: 1
description: >
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors
aliases:
- /resources/tools/looker-validate-project
---
## About
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-validate-project` accepts a project_id parameter.
## Example
```yaml
tools:
validate_project:
kind: looker-validate-project
source: looker-source
description: |
This tool checks a LookML project for syntax errors.
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
Parameters:
- project_id (required): The unique ID of the LookML project.
Output:
A list of error details including the file path and line number, and also a list of models
that are not currently valid due to LookML errors.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-validate-project". |
| source | string | true | Name of the source Looker instance. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -30,10 +30,6 @@ following config for example:
- name: userNames
type: array
description: The user names to be set.
items:
name: userName # the item name doesn't matter but it has to exist
type: string
description: username
```
If the input is an array of strings `["Alice", "Sid", "Bob"]`, the final command

View File

@@ -771,7 +771,7 @@
},
"outputs": [],
"source": [
"version = \"0.26.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
export VERSION="0.26.0"
export VERSION="0.25.0"
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -220,7 +220,7 @@
},
"outputs": [],
"source": [
"version = \"0.26.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -335,7 +335,7 @@ pip install toolbox-llamaindex
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
pip install google-adk[toolbox]
pip install google-adk
{{< /tab >}}
{{< /tabpane >}}
@@ -375,7 +375,7 @@ pip install llama-index-llms-google-genai
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
# No other dependencies required for ADK
pip install toolbox-core
{{< /tab >}}
{{< /tabpane >}}
@@ -617,8 +617,8 @@ from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.adk.tools.toolbox_toolset import ToolboxToolset
from google.genai import types # For constructing message content
from toolbox_core import ToolboxSyncClient
import os
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
@@ -633,47 +633,48 @@ os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1'
# --- Load Tools from Toolbox ---
# TODO(developer): Ensure the Toolbox server is running at http://127.0.0.1:5000
toolset = ToolboxToolset(server_url="http://127.0.0.1:5000")
# TODO(developer): Ensure the Toolbox server is running at <http://127.0.0.1:5000>
# --- Define the Agent's Prompt ---
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
# TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server.
agent_toolset = toolbox_client.load_toolset("my-toolset")
# --- Configure the Agent ---
# --- Define the Agent's Prompt ---
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant that can search and book hotels.',
instruction=prompt,
tools=[toolset], # Pass the loaded toolset
)
# --- Configure the Agent ---
# --- Initialize Services for Running the Agent ---
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant that can search and book hotels.',
instruction=prompt,
tools=agent_toolset, # Pass the loaded toolset
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
async def main():
# --- Initialize Services for Running the Agent ---
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
# Create a new session for the interaction.
session = await session_service.create_session(
session = session_service.create_session(
state={}, app_name='hotel_agent', user_id='123'
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
# --- Define Queries and Run the Agent ---
queries = [
"Find hotels in Basel with Basel in it's name.",
@@ -696,10 +697,6 @@ async def main():
for text in responses:
print(text)
import asyncio
if __name__ == "__main__":
asyncio.run(main())
{{< /tab >}}
{{< /tabpane >}}

View File

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

Some files were not shown because too many files have changed in this diff.