mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-02-12 08:05:06 -05:00
Compare commits
7 Commits
cli-improv
...
refactor/c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e716efea6e | ||
|
|
dcfd056a30 | ||
|
|
59aa21729d | ||
|
|
e25ee6f165 | ||
|
|
9bb4eee494 | ||
|
|
9f5b04cf73 | ||
|
|
66d6b58c4f |
@@ -87,7 +87,7 @@ steps:
|
|||||||
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
|
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv:
|
secretEnv:
|
||||||
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -134,7 +134,7 @@ steps:
|
|||||||
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
|
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
|
||||||
- "ALLOYDB_POSTGRES_REGION=$_REGION"
|
- "ALLOYDB_POSTGRES_REGION=$_REGION"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -171,23 +171,6 @@ steps:
|
|||||||
alloydbainl \
|
alloydbainl \
|
||||||
alloydbainl
|
alloydbainl
|
||||||
|
|
||||||
- id: "alloydb-omni"
|
|
||||||
name: golang:1
|
|
||||||
waitFor: ["compile-test-binary"]
|
|
||||||
entrypoint: /bin/bash
|
|
||||||
env:
|
|
||||||
- "GOPATH=/gopath"
|
|
||||||
volumes:
|
|
||||||
- name: "go"
|
|
||||||
path: "/gopath"
|
|
||||||
args:
|
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
.ci/test_with_coverage.sh \
|
|
||||||
"AlloyDB Omni" \
|
|
||||||
alloydbomni \
|
|
||||||
postgres
|
|
||||||
|
|
||||||
- id: "bigtable"
|
- id: "bigtable"
|
||||||
name: golang:1
|
name: golang:1
|
||||||
waitFor: ["compile-test-binary"]
|
waitFor: ["compile-test-binary"]
|
||||||
@@ -310,26 +293,7 @@ steps:
|
|||||||
.ci/test_with_coverage.sh \
|
.ci/test_with_coverage.sh \
|
||||||
"Cloud Healthcare API" \
|
"Cloud Healthcare API" \
|
||||||
cloudhealthcare \
|
cloudhealthcare \
|
||||||
cloudhealthcare
|
cloudhealthcare || echo "Integration tests failed."
|
||||||
|
|
||||||
- id: "cloud-logging-admin"
|
|
||||||
name: golang:1
|
|
||||||
waitFor: ["compile-test-binary"]
|
|
||||||
entrypoint: /bin/bash
|
|
||||||
env:
|
|
||||||
- "GOPATH=/gopath"
|
|
||||||
- "LOGADMIN_PROJECT=$PROJECT_ID"
|
|
||||||
secretEnv: ["CLIENT_ID"]
|
|
||||||
volumes:
|
|
||||||
- name: "go"
|
|
||||||
path: "/gopath"
|
|
||||||
args:
|
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
.ci/test_with_coverage.sh \
|
|
||||||
"Cloud Logging Admin" \
|
|
||||||
cloudloggingadmin \
|
|
||||||
cloudloggingadmin
|
|
||||||
|
|
||||||
- id: "postgres"
|
- id: "postgres"
|
||||||
name: golang:1
|
name: golang:1
|
||||||
@@ -341,7 +305,7 @@ steps:
|
|||||||
- "POSTGRES_HOST=$_POSTGRES_HOST"
|
- "POSTGRES_HOST=$_POSTGRES_HOST"
|
||||||
- "POSTGRES_PORT=$_POSTGRES_PORT"
|
- "POSTGRES_PORT=$_POSTGRES_PORT"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -354,30 +318,6 @@ steps:
|
|||||||
postgressql \
|
postgressql \
|
||||||
postgresexecutesql
|
postgresexecutesql
|
||||||
|
|
||||||
- id: "cockroachdb"
|
|
||||||
name: golang:1
|
|
||||||
waitFor: ["compile-test-binary"]
|
|
||||||
entrypoint: /bin/bash
|
|
||||||
env:
|
|
||||||
- "GOPATH=/gopath"
|
|
||||||
- "COCKROACHDB_DATABASE=$_DATABASE_NAME"
|
|
||||||
- "COCKROACHDB_PORT=$_COCKROACHDB_PORT"
|
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
|
||||||
secretEnv: ["COCKROACHDB_USER", "COCKROACHDB_HOST","CLIENT_ID"]
|
|
||||||
volumes:
|
|
||||||
- name: "go"
|
|
||||||
path: "/gopath"
|
|
||||||
args:
|
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
.ci/test_with_coverage.sh \
|
|
||||||
"CockroachDB" \
|
|
||||||
cockroachdb \
|
|
||||||
cockroachdbsql \
|
|
||||||
cockroachdbexecutesql \
|
|
||||||
cockroachdblisttables \
|
|
||||||
cockroachdblistschemas
|
|
||||||
|
|
||||||
- id: "spanner"
|
- id: "spanner"
|
||||||
name: golang:1
|
name: golang:1
|
||||||
waitFor: ["compile-test-binary"]
|
waitFor: ["compile-test-binary"]
|
||||||
@@ -947,7 +887,7 @@ steps:
|
|||||||
tar -C /usr/local -xzf go.tar.gz
|
tar -C /usr/local -xzf go.tar.gz
|
||||||
export PATH="/usr/local/go/bin:$$PATH"
|
export PATH="/usr/local/go/bin:$$PATH"
|
||||||
|
|
||||||
go test -v ./tests/oracle/... \
|
go test -v ./internal/sources/oracle/... \
|
||||||
-coverprofile=oracle_coverage.out \
|
-coverprofile=oracle_coverage.out \
|
||||||
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
|
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
|
||||||
|
|
||||||
@@ -955,8 +895,8 @@ steps:
|
|||||||
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
|
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
|
||||||
echo "Oracle total coverage: $total_coverage"
|
echo "Oracle total coverage: $total_coverage"
|
||||||
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
|
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
|
||||||
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 60)}'; then
|
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
|
||||||
echo "Coverage failure: $total_coverage is below 60%."
|
echo "Coverage failure: $total_coverage is below 20%."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -1024,13 +964,6 @@ steps:
|
|||||||
|
|
||||||
availableSecrets:
|
availableSecrets:
|
||||||
secretManager:
|
secretManager:
|
||||||
# Common secrets
|
|
||||||
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
|
|
||||||
env: CLIENT_ID
|
|
||||||
- versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
|
|
||||||
env: API_KEY
|
|
||||||
|
|
||||||
# Resource-specific secrets
|
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
||||||
env: CLOUD_SQL_POSTGRES_USER
|
env: CLOUD_SQL_POSTGRES_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
|
||||||
@@ -1047,6 +980,8 @@ availableSecrets:
|
|||||||
env: POSTGRES_USER
|
env: POSTGRES_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
|
||||||
env: POSTGRES_PASS
|
env: POSTGRES_PASS
|
||||||
|
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
|
||||||
|
env: CLIENT_ID
|
||||||
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
|
||||||
env: NEO4J_USER
|
env: NEO4J_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
|
||||||
@@ -1153,11 +1088,6 @@ availableSecrets:
|
|||||||
env: MARIADB_HOST
|
env: MARIADB_HOST
|
||||||
- versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
|
||||||
env: MONGODB_URI
|
env: MONGODB_URI
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cockroachdb_user/versions/latest
|
|
||||||
env: COCKROACHDB_USER
|
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cockroachdb_host/versions/latest
|
|
||||||
env: COCKROACHDB_HOST
|
|
||||||
|
|
||||||
|
|
||||||
options:
|
options:
|
||||||
logging: CLOUD_LOGGING_ONLY
|
logging: CLOUD_LOGGING_ONLY
|
||||||
@@ -1218,9 +1148,6 @@ substitutions:
|
|||||||
_SINGLESTORE_PORT: "3308"
|
_SINGLESTORE_PORT: "3308"
|
||||||
_SINGLESTORE_DATABASE: "singlestore"
|
_SINGLESTORE_DATABASE: "singlestore"
|
||||||
_SINGLESTORE_USER: "root"
|
_SINGLESTORE_USER: "root"
|
||||||
_COCKROACHDB_HOST: 127.0.0.1
|
|
||||||
_COCKROACHDB_PORT: "26257"
|
|
||||||
_COCKROACHDB_USER: "root"
|
|
||||||
_MARIADB_PORT: "3307"
|
_MARIADB_PORT: "3307"
|
||||||
_MARIADB_DATABASE: test_database
|
_MARIADB_DATABASE: test_database
|
||||||
_SNOWFLAKE_DATABASE: "test"
|
_SNOWFLAKE_DATABASE: "test"
|
||||||
|
|||||||
@@ -23,18 +23,13 @@ steps:
|
|||||||
- |
|
- |
|
||||||
set -ex
|
set -ex
|
||||||
export VERSION=$(cat ./cmd/version.txt)
|
export VERSION=$(cat ./cmd/version.txt)
|
||||||
chmod +x .ci/sample_tests/run_tests.sh
|
chmod +x .ci/quickstart_test/run_go_tests.sh
|
||||||
.ci/sample_tests/run_tests.sh
|
.ci/quickstart_test/run_go_tests.sh
|
||||||
env:
|
env:
|
||||||
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
||||||
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
||||||
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
||||||
- 'DB_USER=${_DB_USER}'
|
- 'DB_USER=${_DB_USER}'
|
||||||
- 'TARGET_ROOT=docs/en/getting-started/quickstart/go'
|
|
||||||
- 'TARGET_LANG=go'
|
|
||||||
- 'TABLE_NAME=hotels_go'
|
|
||||||
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
|
|
||||||
- 'AGENT_FILE_PATTERN=quickstart.go'
|
|
||||||
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
||||||
|
|
||||||
availableSecrets:
|
availableSecrets:
|
||||||
@@ -23,18 +23,13 @@ steps:
|
|||||||
- |
|
- |
|
||||||
set -ex
|
set -ex
|
||||||
export VERSION=$(cat ./cmd/version.txt)
|
export VERSION=$(cat ./cmd/version.txt)
|
||||||
chmod +x .ci/sample_tests/run_tests.sh
|
chmod +x .ci/quickstart_test/run_js_tests.sh
|
||||||
.ci/sample_tests/run_tests.sh
|
.ci/quickstart_test/run_js_tests.sh
|
||||||
env:
|
env:
|
||||||
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
||||||
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
||||||
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
||||||
- 'DB_USER=${_DB_USER}'
|
- 'DB_USER=${_DB_USER}'
|
||||||
- 'TARGET_ROOT=docs/en/getting-started/quickstart/js'
|
|
||||||
- 'TARGET_LANG=js'
|
|
||||||
- 'TABLE_NAME=hotels_js'
|
|
||||||
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
|
|
||||||
- 'AGENT_FILE_PATTERN=quickstart.js'
|
|
||||||
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
||||||
|
|
||||||
availableSecrets:
|
availableSecrets:
|
||||||
@@ -23,18 +23,13 @@ steps:
|
|||||||
- |
|
- |
|
||||||
set -ex
|
set -ex
|
||||||
export VERSION=$(cat ./cmd/version.txt)
|
export VERSION=$(cat ./cmd/version.txt)
|
||||||
chmod +x .ci/sample_tests/run_tests.sh
|
chmod +x .ci/quickstart_test/run_py_tests.sh
|
||||||
.ci/sample_tests/run_tests.sh
|
.ci/quickstart_test/run_py_tests.sh
|
||||||
env:
|
env:
|
||||||
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
||||||
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
||||||
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
- 'DATABASE_NAME=${_DATABASE_NAME}'
|
||||||
- 'DB_USER=${_DB_USER}'
|
- 'DB_USER=${_DB_USER}'
|
||||||
- 'TARGET_ROOT=docs/en/getting-started/quickstart/python'
|
|
||||||
- 'TARGET_LANG=python'
|
|
||||||
- 'TABLE_NAME=hotels_python'
|
|
||||||
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
|
|
||||||
- 'AGENT_FILE_PATTERN=quickstart.py'
|
|
||||||
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
|
||||||
|
|
||||||
availableSecrets:
|
availableSecrets:
|
||||||
125
.ci/quickstart_test/run_go_tests.sh
Normal file
125
.ci/quickstart_test/run_go_tests.sh
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
# Copyright 2025 Google LLC
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
TABLE_NAME="hotels_go"
|
||||||
|
QUICKSTART_GO_DIR="docs/en/getting-started/quickstart/go"
|
||||||
|
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
|
||||||
|
|
||||||
|
PROXY_PID=""
|
||||||
|
TOOLBOX_PID=""
|
||||||
|
|
||||||
|
install_system_packages() {
|
||||||
|
apt-get update && apt-get install -y \
|
||||||
|
postgresql-client \
|
||||||
|
wget \
|
||||||
|
gettext-base \
|
||||||
|
netcat-openbsd
|
||||||
|
}
|
||||||
|
|
||||||
|
start_cloud_sql_proxy() {
|
||||||
|
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
|
||||||
|
chmod +x /usr/local/bin/cloud-sql-proxy
|
||||||
|
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
|
||||||
|
PROXY_PID=$!
|
||||||
|
|
||||||
|
for i in {1..30}; do
|
||||||
|
if nc -z 127.0.0.1 5432; then
|
||||||
|
echo "Cloud SQL Proxy is up and running."
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Cloud SQL Proxy failed to start within the timeout period."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_toolbox() {
|
||||||
|
TOOLBOX_YAML="/tools.yaml"
|
||||||
|
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
|
||||||
|
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
|
||||||
|
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
|
||||||
|
chmod +x "/toolbox"
|
||||||
|
/toolbox --tools-file "$TOOLBOX_YAML" &
|
||||||
|
TOOLBOX_PID=$!
|
||||||
|
sleep 2
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_orch_table() {
|
||||||
|
export TABLE_NAME
|
||||||
|
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
|
||||||
|
}
|
||||||
|
|
||||||
|
run_orch_test() {
|
||||||
|
local orch_dir="$1"
|
||||||
|
local orch_name
|
||||||
|
orch_name=$(basename "$orch_dir")
|
||||||
|
|
||||||
|
if [ "$orch_name" == "openAI" ]; then
|
||||||
|
echo -e "\nSkipping framework '${orch_name}': Temporarily excluded."
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
(
|
||||||
|
set -e
|
||||||
|
setup_orch_table
|
||||||
|
|
||||||
|
echo "--- Preparing module for $orch_name ---"
|
||||||
|
cd "$orch_dir"
|
||||||
|
|
||||||
|
if [ -f "go.mod" ]; then
|
||||||
|
go mod tidy
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
export ORCH_NAME="$orch_name"
|
||||||
|
|
||||||
|
echo "--- Running tests for $orch_name ---"
|
||||||
|
go test -v ./...
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup_all() {
|
||||||
|
echo "--- Final cleanup: Shutting down processes and dropping table ---"
|
||||||
|
if [ -n "$TOOLBOX_PID" ]; then
|
||||||
|
kill $TOOLBOX_PID || true
|
||||||
|
fi
|
||||||
|
if [ -n "$PROXY_PID" ]; then
|
||||||
|
kill $PROXY_PID || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
trap cleanup_all EXIT
|
||||||
|
|
||||||
|
# Main script execution
|
||||||
|
install_system_packages
|
||||||
|
start_cloud_sql_proxy
|
||||||
|
|
||||||
|
export PGHOST=127.0.0.1
|
||||||
|
export PGPORT=5432
|
||||||
|
export PGPASSWORD="$DB_PASSWORD"
|
||||||
|
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
|
||||||
|
|
||||||
|
setup_toolbox
|
||||||
|
|
||||||
|
for ORCH_DIR in "$QUICKSTART_GO_DIR"/*/; do
|
||||||
|
if [ ! -d "$ORCH_DIR" ]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
run_orch_test "$ORCH_DIR"
|
||||||
|
done
|
||||||
125
.ci/quickstart_test/run_js_tests.sh
Normal file
125
.ci/quickstart_test/run_js_tests.sh
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
# Copyright 2025 Google LLC
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
TABLE_NAME="hotels_js"
|
||||||
|
QUICKSTART_JS_DIR="docs/en/getting-started/quickstart/js"
|
||||||
|
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
|
||||||
|
|
||||||
|
# Initialize process IDs to empty at the top of the script
|
||||||
|
PROXY_PID=""
|
||||||
|
TOOLBOX_PID=""
|
||||||
|
|
||||||
|
install_system_packages() {
|
||||||
|
apt-get update && apt-get install -y \
|
||||||
|
postgresql-client \
|
||||||
|
wget \
|
||||||
|
gettext-base \
|
||||||
|
netcat-openbsd
|
||||||
|
}
|
||||||
|
|
||||||
|
start_cloud_sql_proxy() {
|
||||||
|
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
|
||||||
|
chmod +x /usr/local/bin/cloud-sql-proxy
|
||||||
|
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
|
||||||
|
PROXY_PID=$!
|
||||||
|
|
||||||
|
for i in {1..30}; do
|
||||||
|
if nc -z 127.0.0.1 5432; then
|
||||||
|
echo "Cloud SQL Proxy is up and running."
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Cloud SQL Proxy failed to start within the timeout period."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_toolbox() {
|
||||||
|
TOOLBOX_YAML="/tools.yaml"
|
||||||
|
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
|
||||||
|
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
|
||||||
|
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
|
||||||
|
chmod +x "/toolbox"
|
||||||
|
/toolbox --tools-file "$TOOLBOX_YAML" &
|
||||||
|
TOOLBOX_PID=$!
|
||||||
|
sleep 2
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_orch_table() {
|
||||||
|
export TABLE_NAME
|
||||||
|
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
|
||||||
|
}
|
||||||
|
|
||||||
|
run_orch_test() {
|
||||||
|
local orch_dir="$1"
|
||||||
|
local orch_name
|
||||||
|
orch_name=$(basename "$orch_dir")
|
||||||
|
|
||||||
|
(
|
||||||
|
set -e
|
||||||
|
echo "--- Preparing environment for $orch_name ---"
|
||||||
|
setup_orch_table
|
||||||
|
|
||||||
|
cd "$orch_dir"
|
||||||
|
echo "Installing dependencies for $orch_name..."
|
||||||
|
if [ -f "package-lock.json" ]; then
|
||||||
|
npm ci
|
||||||
|
else
|
||||||
|
npm install
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
echo "--- Running tests for $orch_name ---"
|
||||||
|
export ORCH_NAME="$orch_name"
|
||||||
|
node --test quickstart.test.js
|
||||||
|
|
||||||
|
echo "--- Cleaning environment for $orch_name ---"
|
||||||
|
rm -rf "${orch_name}/node_modules"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup_all() {
|
||||||
|
echo "--- Final cleanup: Shutting down processes and dropping table ---"
|
||||||
|
if [ -n "$TOOLBOX_PID" ]; then
|
||||||
|
kill $TOOLBOX_PID || true
|
||||||
|
fi
|
||||||
|
if [ -n "$PROXY_PID" ]; then
|
||||||
|
kill $PROXY_PID || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
trap cleanup_all EXIT
|
||||||
|
|
||||||
|
# Main script execution
|
||||||
|
install_system_packages
|
||||||
|
start_cloud_sql_proxy
|
||||||
|
|
||||||
|
export PGHOST=127.0.0.1
|
||||||
|
export PGPORT=5432
|
||||||
|
export PGPASSWORD="$DB_PASSWORD"
|
||||||
|
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
|
||||||
|
|
||||||
|
setup_toolbox
|
||||||
|
|
||||||
|
for ORCH_DIR in "$QUICKSTART_JS_DIR"/*/; do
|
||||||
|
if [ ! -d "$ORCH_DIR" ]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
run_orch_test "$ORCH_DIR"
|
||||||
|
done
|
||||||
115
.ci/quickstart_test/run_py_tests.sh
Normal file
115
.ci/quickstart_test/run_py_tests.sh
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
# Copyright 2025 Google LLC
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
TABLE_NAME="hotels_python"
|
||||||
|
QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python"
|
||||||
|
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
|
||||||
|
|
||||||
|
PROXY_PID=""
|
||||||
|
TOOLBOX_PID=""
|
||||||
|
|
||||||
|
install_system_packages() {
|
||||||
|
apt-get update && apt-get install -y \
|
||||||
|
postgresql-client \
|
||||||
|
python3-venv \
|
||||||
|
wget \
|
||||||
|
gettext-base \
|
||||||
|
netcat-openbsd
|
||||||
|
}
|
||||||
|
|
||||||
|
start_cloud_sql_proxy() {
|
||||||
|
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
|
||||||
|
chmod +x /usr/local/bin/cloud-sql-proxy
|
||||||
|
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
|
||||||
|
PROXY_PID=$!
|
||||||
|
|
||||||
|
for i in {1..30}; do
|
||||||
|
if nc -z 127.0.0.1 5432; then
|
||||||
|
echo "Cloud SQL Proxy is up and running."
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Cloud SQL Proxy failed to start within the timeout period."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_toolbox() {
|
||||||
|
TOOLBOX_YAML="/tools.yaml"
|
||||||
|
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
|
||||||
|
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
|
||||||
|
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
|
||||||
|
chmod +x "/toolbox"
|
||||||
|
/toolbox --tools-file "$TOOLBOX_YAML" &
|
||||||
|
TOOLBOX_PID=$!
|
||||||
|
sleep 2
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_orch_table() {
|
||||||
|
export TABLE_NAME
|
||||||
|
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
|
||||||
|
}
|
||||||
|
|
||||||
|
run_orch_test() {
|
||||||
|
local orch_dir="$1"
|
||||||
|
local orch_name
|
||||||
|
orch_name=$(basename "$orch_dir")
|
||||||
|
(
|
||||||
|
set -e
|
||||||
|
setup_orch_table
|
||||||
|
cd "$orch_dir"
|
||||||
|
local VENV_DIR=".venv"
|
||||||
|
python3 -m venv "$VENV_DIR"
|
||||||
|
source "$VENV_DIR/bin/activate"
|
||||||
|
pip install -r requirements.txt
|
||||||
|
echo "--- Running tests for $orch_name ---"
|
||||||
|
cd ..
|
||||||
|
ORCH_NAME="$orch_name" pytest
|
||||||
|
rm -rf "$VENV_DIR"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup_all() {
|
||||||
|
echo "--- Final cleanup: Shutting down processes and dropping table ---"
|
||||||
|
if [ -n "$TOOLBOX_PID" ]; then
|
||||||
|
kill $TOOLBOX_PID || true
|
||||||
|
fi
|
||||||
|
if [ -n "$PROXY_PID" ]; then
|
||||||
|
kill $PROXY_PID || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
trap cleanup_all EXIT
|
||||||
|
|
||||||
|
# Main script execution
|
||||||
|
install_system_packages
|
||||||
|
start_cloud_sql_proxy
|
||||||
|
|
||||||
|
export PGHOST=127.0.0.1
|
||||||
|
export PGPORT=5432
|
||||||
|
export PGPASSWORD="$DB_PASSWORD"
|
||||||
|
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
|
||||||
|
|
||||||
|
setup_toolbox
|
||||||
|
|
||||||
|
for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do
|
||||||
|
if [ ! -d "$ORCH_DIR" ]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
run_orch_test "$ORCH_DIR"
|
||||||
|
done
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
# Copyright 2026 Google LLC
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "${_IMAGE}"
|
|
||||||
id: "py-pre-post-processing-test"
|
|
||||||
entrypoint: "bash"
|
|
||||||
args:
|
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
set -ex
|
|
||||||
chmod +x .ci/sample_tests/run_tests.sh
|
|
||||||
.ci/sample_tests/run_tests.sh
|
|
||||||
env:
|
|
||||||
- "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
|
|
||||||
- "GCP_PROJECT=${_GCP_PROJECT}"
|
|
||||||
- "DATABASE_NAME=${_DATABASE_NAME}"
|
|
||||||
- "DB_USER=${_DB_USER}"
|
|
||||||
- "TARGET_ROOT=${_TARGET_ROOT}"
|
|
||||||
- "TARGET_LANG=${_TARGET_LANG}"
|
|
||||||
- "TABLE_NAME=${_TABLE_NAME}"
|
|
||||||
- "SQL_FILE=${_SQL_FILE}"
|
|
||||||
- "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
|
|
||||||
secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]
|
|
||||||
|
|
||||||
availableSecrets:
|
|
||||||
secretManager:
|
|
||||||
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5
|
|
||||||
env: "TOOLS_YAML_CONTENT"
|
|
||||||
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
|
|
||||||
env: "GOOGLE_API_KEY"
|
|
||||||
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
|
|
||||||
env: "DB_PASSWORD"
|
|
||||||
|
|
||||||
timeout: 1200s
|
|
||||||
|
|
||||||
substitutions:
|
|
||||||
_TARGET_LANG: "python"
|
|
||||||
_IMAGE: "gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0"
|
|
||||||
_TARGET_ROOT: "docs/en/samples/pre_post_processing/python"
|
|
||||||
_TABLE_NAME: "hotels_py_pre_post_processing"
|
|
||||||
_SQL_FILE: ".ci/sample_tests/setup_hotels.sql"
|
|
||||||
_AGENT_FILE_PATTERN: "agent.py"
|
|
||||||
|
|
||||||
options:
|
|
||||||
logging: CLOUD_LOGGING_ONLY
|
|
||||||
@@ -1,202 +0,0 @@
|
|||||||
# Copyright 2026 Google LLC
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# --- Configuration (from Environment Variables) ---
|
|
||||||
# TARGET_ROOT: The directory to search for tests (e.g., docs/en/getting-started/quickstart/js)
|
|
||||||
# TARGET_LANG: python, js, go
|
|
||||||
# TABLE_NAME: Database table name to use
|
|
||||||
# SQL_FILE: Path to the SQL setup file
|
|
||||||
# AGENT_FILE_PATTERN: Filename to look for (e.g., quickstart.js or agent.py)
|
|
||||||
|
|
||||||
VERSION=$(cat ./cmd/version.txt)
|
|
||||||
|
|
||||||
# Process IDs & Logs
|
|
||||||
PROXY_PID=""
|
|
||||||
TOOLBOX_PID=""
|
|
||||||
PROXY_LOG="cloud_sql_proxy.log"
|
|
||||||
TOOLBOX_LOG="toolbox_server.log"
|
|
||||||
|
|
||||||
install_system_packages() {
|
|
||||||
echo "Installing system packages..."
|
|
||||||
apt-get update && apt-get install -y \
|
|
||||||
postgresql-client \
|
|
||||||
wget \
|
|
||||||
gettext-base \
|
|
||||||
netcat-openbsd
|
|
||||||
|
|
||||||
if [[ "$TARGET_LANG" == "python" ]]; then
|
|
||||||
apt-get install -y python3-venv
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
start_cloud_sql_proxy() {
|
|
||||||
echo "Starting Cloud SQL Proxy..."
|
|
||||||
wget -q "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
|
|
||||||
chmod +x /usr/local/bin/cloud-sql-proxy
|
|
||||||
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" > "$PROXY_LOG" 2>&1 &
|
|
||||||
PROXY_PID=$!
|
|
||||||
|
|
||||||
# Health Check
|
|
||||||
for i in {1..30}; do
|
|
||||||
if nc -z 127.0.0.1 5432; then
|
|
||||||
echo "Cloud SQL Proxy is up and running."
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
sleep 1
|
|
||||||
done
|
|
||||||
echo "ERROR: Cloud SQL Proxy failed to start. Logs:"
|
|
||||||
cat "$PROXY_LOG"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
setup_toolbox() {
|
|
||||||
echo "Setting up Toolbox server..."
|
|
||||||
TOOLBOX_YAML="/tools.yaml"
|
|
||||||
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
|
|
||||||
wget -q "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
|
|
||||||
chmod +x "/toolbox"
|
|
||||||
/toolbox --tools-file "$TOOLBOX_YAML" > "$TOOLBOX_LOG" 2>&1 &
|
|
||||||
TOOLBOX_PID=$!
|
|
||||||
|
|
||||||
# Health Check
|
|
||||||
for i in {1..15}; do
|
|
||||||
if nc -z 127.0.0.1 5000; then
|
|
||||||
echo "Toolbox server is up and running."
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
sleep 1
|
|
||||||
done
|
|
||||||
echo "ERROR: Toolbox server failed to start. Logs:"
|
|
||||||
cat "$TOOLBOX_LOG"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
setup_db_table() {
|
|
||||||
echo "Setting up database table $TABLE_NAME using $SQL_FILE..."
|
|
||||||
export TABLE_NAME
|
|
||||||
envsubst < "$SQL_FILE" | psql -h 127.0.0.1 -p 5432 -U "$DB_USER" -d "$DATABASE_NAME"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_python_test() {
|
|
||||||
local dir=$1
|
|
||||||
local name=$(basename "$dir")
|
|
||||||
echo "--- Running Python Test: $name ---"
|
|
||||||
(
|
|
||||||
cd "$dir"
|
|
||||||
python3 -m venv .venv
|
|
||||||
source .venv/bin/activate
|
|
||||||
pip install -q -r requirements.txt pytest
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
local test_file=$(find . -maxdepth 1 -name "*test.py" | head -n 1)
|
|
||||||
if [ -n "$test_file" ]; then
|
|
||||||
echo "Found native test: $test_file. Running pytest..."
|
|
||||||
export ORCH_NAME="$name"
|
|
||||||
export PYTHONPATH="../"
|
|
||||||
pytest "$test_file"
|
|
||||||
else
|
|
||||||
echo "No native test found. running agent directly..."
|
|
||||||
export PYTHONPATH="../"
|
|
||||||
python3 "${name}/${AGENT_FILE_PATTERN}"
|
|
||||||
fi
|
|
||||||
rm -rf "${name}/.venv"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
run_js_test() {
|
|
||||||
local dir=$1
|
|
||||||
local name=$(basename "$dir")
|
|
||||||
echo "--- Running JS Test: $name ---"
|
|
||||||
(
|
|
||||||
cd "$dir"
|
|
||||||
if [ -f "package-lock.json" ]; then npm ci -q; else npm install -q; fi
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
# Looking for a JS test file in the parent directory
|
|
||||||
local test_file=$(find . -maxdepth 1 -name "*test.js" | head -n 1)
|
|
||||||
if [ -n "$test_file" ]; then
|
|
||||||
echo "Found native test: $test_file. Running node --test..."
|
|
||||||
export ORCH_NAME="$name"
|
|
||||||
node --test "$test_file"
|
|
||||||
else
|
|
||||||
echo "No native test found. running agent directly..."
|
|
||||||
node "${name}/${AGENT_FILE_PATTERN}"
|
|
||||||
fi
|
|
||||||
rm -rf "${name}/node_modules"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
run_go_test() {
|
|
||||||
local dir=$1
|
|
||||||
local name=$(basename "$dir")
|
|
||||||
|
|
||||||
if [ "$name" == "openAI" ]; then
|
|
||||||
echo -e "\nSkipping framework '${name}': Temporarily excluded."
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "--- Running Go Test: $name ---"
|
|
||||||
(
|
|
||||||
cd "$dir"
|
|
||||||
if [ -f "go.mod" ]; then
|
|
||||||
go mod tidy
|
|
||||||
fi
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
local test_file=$(find . -maxdepth 1 -name "*test.go" | head -n 1)
|
|
||||||
if [ -n "$test_file" ]; then
|
|
||||||
echo "Found native test: $test_file. Running go test..."
|
|
||||||
export ORCH_NAME="$name"
|
|
||||||
go test -v ./...
|
|
||||||
else
|
|
||||||
echo "No native test found. running agent directly..."
|
|
||||||
cd "$name"
|
|
||||||
go run "."
|
|
||||||
fi
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
cleanup() {
|
|
||||||
echo "Cleaning up background processes..."
|
|
||||||
[ -n "$TOOLBOX_PID" ] && kill "$TOOLBOX_PID" || true
|
|
||||||
[ -n "$PROXY_PID" ] && kill "$PROXY_PID" || true
|
|
||||||
}
|
|
||||||
trap cleanup EXIT
|
|
||||||
|
|
||||||
# --- Execution ---
|
|
||||||
install_system_packages
|
|
||||||
start_cloud_sql_proxy
|
|
||||||
|
|
||||||
export PGHOST=127.0.0.1
|
|
||||||
export PGPORT=5432
|
|
||||||
export PGPASSWORD="$DB_PASSWORD"
|
|
||||||
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
|
|
||||||
|
|
||||||
setup_toolbox
|
|
||||||
setup_db_table
|
|
||||||
|
|
||||||
echo "Scanning $TARGET_ROOT for tests with pattern $AGENT_FILE_PATTERN..."
|
|
||||||
|
|
||||||
find "$TARGET_ROOT" -name "$AGENT_FILE_PATTERN" | while read -r agent_file; do
|
|
||||||
sample_dir=$(dirname "$agent_file")
|
|
||||||
if [[ "$TARGET_LANG" == "python" ]]; then
|
|
||||||
run_python_test "$sample_dir"
|
|
||||||
elif [[ "$TARGET_LANG" == "js" ]]; then
|
|
||||||
run_js_test "$sample_dir"
|
|
||||||
elif [[ "$TARGET_LANG" == "go" ]]; then
|
|
||||||
run_go_test "$sample_dir"
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
6
.github/workflows/deploy_dev_docs.yaml
vendored
6
.github/workflows/deploy_dev_docs.yaml
vendored
@@ -40,7 +40,7 @@ jobs:
|
|||||||
group: docs-deployment
|
group: docs-deployment
|
||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
|
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
|
||||||
|
|
||||||
@@ -51,12 +51,12 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|||||||
@@ -30,14 +30,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout main branch (for latest templates and theme)
|
- name: Checkout main branch (for latest templates and theme)
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
ref: 'main'
|
ref: 'main'
|
||||||
submodules: 'recursive'
|
submodules: 'recursive'
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Checkout old content from tag into a temporary directory
|
- name: Checkout old content from tag into a temporary directory
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.inputs.version_tag }}
|
ref: ${{ github.event.inputs.version_tag }}
|
||||||
path: 'old_version_source' # Checkout into a temp subdir
|
path: 'old_version_source' # Checkout into a temp subdir
|
||||||
@@ -57,7 +57,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
hugo-version: "0.145.0"
|
hugo-version: "0.145.0"
|
||||||
extended: true
|
extended: true
|
||||||
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
|
|||||||
4
.github/workflows/deploy_versioned_docs.yaml
vendored
4
.github/workflows/deploy_versioned_docs.yaml
vendored
@@ -30,7 +30,7 @@ jobs:
|
|||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Code at Tag
|
- name: Checkout Code at Tag
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.release.tag_name }}
|
ref: ${{ github.event.release.tag_name }}
|
||||||
|
|
||||||
@@ -44,7 +44,7 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/docs_preview_clean.yaml
vendored
2
.github/workflows/docs_preview_clean.yaml
vendored
@@ -34,7 +34,7 @@ jobs:
|
|||||||
group: "preview-${{ github.event.number }}"
|
group: "preview-${{ github.event.number }}"
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
ref: versioned-gh-pages
|
ref: versioned-gh-pages
|
||||||
|
|
||||||
|
|||||||
6
.github/workflows/docs_preview_deploy.yaml
vendored
6
.github/workflows/docs_preview_deploy.yaml
vendored
@@ -49,7 +49,7 @@ jobs:
|
|||||||
group: "preview-${{ github.event.number }}"
|
group: "preview-${{ github.event.number }}"
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
with:
|
with:
|
||||||
# Checkout the PR's HEAD commit (supports forks).
|
# Checkout the PR's HEAD commit (supports forks).
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
@@ -62,12 +62,12 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|||||||
29
.github/workflows/link_checker_workflow.yaml
vendored
29
.github/workflows/link_checker_workflow.yaml
vendored
@@ -22,47 +22,38 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repository
|
- name: Checkout Repository
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
|
|
||||||
- name: Restore lychee cache
|
- name: Restore lychee cache
|
||||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
||||||
with:
|
with:
|
||||||
path: .lycheecache
|
path: .lycheecache
|
||||||
key: cache-lychee-${{ github.sha }}
|
key: cache-lychee-${{ github.sha }}
|
||||||
restore-keys: cache-lychee-
|
restore-keys: cache-lychee-
|
||||||
|
|
||||||
- name: Link Checker
|
- name: Link Checker
|
||||||
id: lychee-check
|
|
||||||
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
|
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
|
||||||
continue-on-error: true
|
|
||||||
with:
|
with:
|
||||||
args: >
|
args: >
|
||||||
--quiet
|
--verbose
|
||||||
--no-progress
|
--no-progress
|
||||||
--cache
|
--cache
|
||||||
--max-cache-age 1d
|
--max-cache-age 1d
|
||||||
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
|
|
||||||
README.md
|
README.md
|
||||||
docs/
|
docs/
|
||||||
output: lychee-report.md
|
output: /tmp/foo.txt
|
||||||
format: markdown
|
|
||||||
fail: true
|
fail: true
|
||||||
jobSummary: false
|
jobSummary: true
|
||||||
debug: false
|
debug: true
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
# This step only runs if the 'lychee_check' step fails, ensuring the
|
||||||
- name: Display Failure Report
|
# context note only appears when the developer needs to troubleshoot.
|
||||||
# Run this ONLY if the link checker failed
|
- name: Display Link Context Note on Failure
|
||||||
if: steps.lychee-check.outcome == 'failure'
|
if: ${{ failure() }}
|
||||||
run: |
|
run: |
|
||||||
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
|
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
|
||||||
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
|
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
|
||||||
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
|
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
|
||||||
echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "---" >> $GITHUB_STEP_SUMMARY
|
echo "---" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
echo "### Broken Links Found" >> $GITHUB_STEP_SUMMARY
|
|
||||||
cat ./lychee-report.md >> $GITHUB_STEP_SUMMARY
|
|
||||||
|
|
||||||
exit 1
|
|
||||||
|
|||||||
4
.github/workflows/lint.yaml
vendored
4
.github/workflows/lint.yaml
vendored
@@ -51,11 +51,11 @@ jobs:
|
|||||||
console.log('Failed to remove label. Another job may have already removed it!');
|
console.log('Failed to remove label. Another job may have already removed it!');
|
||||||
}
|
}
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||||
with:
|
with:
|
||||||
go-version: "1.25"
|
go-version: "1.25"
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
|||||||
2
.github/workflows/publish-mcp.yml
vendored
2
.github/workflows/publish-mcp.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
|
|
||||||
- name: Wait for image in Artifact Registry
|
- name: Wait for image in Artifact Registry
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
2
.github/workflows/sync-labels.yaml
vendored
2
.github/workflows/sync-labels.yaml
vendored
@@ -29,7 +29,7 @@ jobs:
|
|||||||
issues: 'write'
|
issues: 'write'
|
||||||
pull-requests: 'write'
|
pull-requests: 'write'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
|
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
4
.github/workflows/tests.yaml
vendored
4
.github/workflows/tests.yaml
vendored
@@ -57,12 +57,12 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||||
with:
|
with:
|
||||||
go-version: "1.24"
|
go-version: "1.24"
|
||||||
|
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
|||||||
@@ -51,10 +51,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
|
|||||||
# Add a new version block here before every release
|
# Add a new version block here before every release
|
||||||
# The order of versions in this file is mirrored into the dropdown
|
# The order of versions in this file is mirrored into the dropdown
|
||||||
|
|
||||||
[[params.versions]]
|
|
||||||
version = "v0.26.0"
|
|
||||||
url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
|
|
||||||
|
|
||||||
[[params.versions]]
|
[[params.versions]]
|
||||||
version = "v0.25.0"
|
version = "v0.25.0"
|
||||||
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
|
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
|
||||||
|
|||||||
@@ -23,7 +23,8 @@ https://cloud.dgraph.io/login
|
|||||||
https://dgraph.io/docs
|
https://dgraph.io/docs
|
||||||
|
|
||||||
# MySQL Community downloads and main site (often protected by bot mitigation)
|
# MySQL Community downloads and main site (often protected by bot mitigation)
|
||||||
^https?://(.*\.)?mysql\.com/.*
|
https://dev.mysql.com/downloads/installer/
|
||||||
|
https://www.mysql.com/
|
||||||
|
|
||||||
# Claude desktop download link
|
# Claude desktop download link
|
||||||
https://claude.ai/download
|
https://claude.ai/download
|
||||||
@@ -36,9 +37,9 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
|
|||||||
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
|
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
|
||||||
|
|
||||||
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
|
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
|
||||||
^https?://(www\.)?npmjs\.com/.*
|
https://www.npmjs.com/package/@toolbox-sdk/core
|
||||||
|
https://www.npmjs.com/package/@toolbox-sdk/adk
|
||||||
|
|
||||||
https://www.oceanbase.com/
|
|
||||||
|
|
||||||
# Ignore social media and blog profiles to reduce external request overhead
|
# Ignore social media and blog profiles to reduce external request overhead
|
||||||
https://medium.com/@mcp_toolbox
|
https://medium.com/@mcp_toolbox
|
||||||
25
CHANGELOG.md
25
CHANGELOG.md
@@ -1,30 +1,5 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
|
|
||||||
|
|
||||||
|
|
||||||
### ⚠ BREAKING CHANGES
|
|
||||||
|
|
||||||
* Validate tool naming ([#2305](https://github.com/googleapis/genai-toolbox/issues/2305)) ([5054212](https://github.com/googleapis/genai-toolbox/commit/5054212fa43017207fe83275d27b9fbab96e8ab5))
|
|
||||||
* **tools/cloudgda:** Update description and parameter name for cloudgda tool ([#2288](https://github.com/googleapis/genai-toolbox/issues/2288)) ([6b02591](https://github.com/googleapis/genai-toolbox/commit/6b025917032394a66840488259db8ff2c3063016))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
* Add new `user-agent-metadata` flag ([#2302](https://github.com/googleapis/genai-toolbox/issues/2302)) ([adc9589](https://github.com/googleapis/genai-toolbox/commit/adc9589766904d9e3cbe0a6399222f8d4bb9d0cc))
|
|
||||||
* Add remaining flag to Toolbox server in MCP registry ([#2272](https://github.com/googleapis/genai-toolbox/issues/2272)) ([5e0999e](https://github.com/googleapis/genai-toolbox/commit/5e0999ebf5cdd9046e96857738254b2e0561b6d2))
|
|
||||||
* **embeddingModel:** Add embedding model to MCP handler ([#2310](https://github.com/googleapis/genai-toolbox/issues/2310)) ([e4f60e5](https://github.com/googleapis/genai-toolbox/commit/e4f60e56335b755ef55b9553d3f40b31858ec8d9))
|
|
||||||
* **sources/bigquery:** Make maximum rows returned from queries configurable ([#2262](https://github.com/googleapis/genai-toolbox/issues/2262)) ([4abf0c3](https://github.com/googleapis/genai-toolbox/commit/4abf0c39e717d53b22cc61efb65e09928c598236))
|
|
||||||
* **prebuilt/cloud-sql:** Add create backup tool for Cloud SQL ([#2141](https://github.com/googleapis/genai-toolbox/issues/2141)) ([8e0fb03](https://github.com/googleapis/genai-toolbox/commit/8e0fb0348315a80f63cb47b3c7204869482448f4))
|
|
||||||
* **prebuilt/cloud-sql:** Add restore backup tool for Cloud SQL ([#2171](https://github.com/googleapis/genai-toolbox/issues/2171)) ([00c3e6d](https://github.com/googleapis/genai-toolbox/commit/00c3e6d8cba54e2ab6cb271c7e6b378895df53e1))
|
|
||||||
* Support combining multiple prebuilt configurations ([#2295](https://github.com/googleapis/genai-toolbox/issues/2295)) ([e535b37](https://github.com/googleapis/genai-toolbox/commit/e535b372ea81864d644a67135a1b07e4e519b4b4))
|
|
||||||
* Support MCP specs version 2025-11-25 ([#2303](https://github.com/googleapis/genai-toolbox/issues/2303)) ([4d23a3b](https://github.com/googleapis/genai-toolbox/commit/4d23a3bbf2797b1f7fe328aeb5789e778121da23))
|
|
||||||
* **tools:** Add `valueFromParam` support to Tool config ([#2333](https://github.com/googleapis/genai-toolbox/issues/2333)) ([15101b1](https://github.com/googleapis/genai-toolbox/commit/15101b1edbe2b85a4a5f9f819c23cf83138f4ee1))
|
|
||||||
|
|
||||||
|
|
||||||
### Bug Fixes
|
|
||||||
|
|
||||||
* **tools/cloudhealthcare:** Add check for client authorization before retrieving token string ([#2327](https://github.com/googleapis/genai-toolbox/issues/2327)) ([c25a233](https://github.com/googleapis/genai-toolbox/commit/c25a2330fea2ac382a398842c9e572e4e19bcb08))
|
|
||||||
|
|
||||||
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
|
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -92,11 +92,11 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
|
|||||||
`newdb.go`. Create a `Config` struct to include all the necessary parameters
|
`newdb.go`. Create a `Config` struct to include all the necessary parameters
|
||||||
for connecting to the database (e.g., host, port, username, password, database
|
for connecting to the database (e.g., host, port, username, password, database
|
||||||
name) and a `Source` struct to store necessary parameters for tools (e.g.,
|
name) and a `Source` struct to store necessary parameters for tools (e.g.,
|
||||||
Name, Type, connection object, additional config).
|
Name, Kind, connection object, additional config).
|
||||||
* **Implement the
|
* **Implement the
|
||||||
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
|
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
|
||||||
interface**. This interface requires two methods:
|
interface**. This interface requires two methods:
|
||||||
* `SourceConfigType() string`: Returns a unique string identifier for your
|
* `SourceConfigKind() string`: Returns a unique string identifier for your
|
||||||
data source (e.g., `"newdb"`).
|
data source (e.g., `"newdb"`).
|
||||||
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
|
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
|
||||||
Creates a new instance of your data source and establishes a connection to
|
Creates a new instance of your data source and establishes a connection to
|
||||||
@@ -104,7 +104,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
|
|||||||
* **Implement the
|
* **Implement the
|
||||||
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
|
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
|
||||||
interface**. This interface requires one method:
|
interface**. This interface requires one method:
|
||||||
* `SourceType() string`: Returns the same string identifier as `SourceConfigType()`.
|
* `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
|
||||||
* **Implement `init()`** to register the new Source.
|
* **Implement `init()`** to register the new Source.
|
||||||
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
||||||
|
|
||||||
@@ -126,7 +126,7 @@ tools.
|
|||||||
* **Implement the
|
* **Implement the
|
||||||
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
|
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
|
||||||
interface**. This interface requires one method:
|
interface**. This interface requires one method:
|
||||||
* `ToolConfigType() string`: Returns a unique string identifier for your tool
|
* `ToolConfigKind() string`: Returns a unique string identifier for your tool
|
||||||
(e.g., `"newdb-tool"`).
|
(e.g., `"newdb-tool"`).
|
||||||
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
|
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
|
||||||
instance of your tool and validates that it can connect to the specified
|
instance of your tool and validates that it can connect to the specified
|
||||||
@@ -243,7 +243,7 @@ resources.
|
|||||||
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |
|
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |
|
||||||
|
|
||||||
Pull requests should always add scope whenever possible. The scope is
|
Pull requests should always add scope whenever possible. The scope is
|
||||||
formatted as `<scope-resource>/<scope-type>` (e.g., `sources/postgres`, or
|
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
|
||||||
`tools/mssql-sql`).
|
`tools/mssql-sql`).
|
||||||
|
|
||||||
Ideally, **each PR covers only one scope**, if this is
|
Ideally, **each PR covers only one scope**, if this is
|
||||||
|
|||||||
17
DEVELOPER.md
17
DEVELOPER.md
@@ -47,12 +47,11 @@ Before you begin, ensure you have the following:
|
|||||||
### Tool Naming Conventions
|
### Tool Naming Conventions
|
||||||
|
|
||||||
This section details the purpose and conventions for MCP Toolbox's tools naming
|
This section details the purpose and conventions for MCP Toolbox's tools naming
|
||||||
properties, **tool name** and **tool type**.
|
properties, **tool name** and **tool kind**.
|
||||||
|
|
||||||
```
|
```
|
||||||
kind: tools
|
cancel_hotel: <- tool name
|
||||||
name: cancel_hotel <- tool name
|
kind: postgres-sql <- tool kind
|
||||||
type: postgres-sql <- tool type
|
|
||||||
source: my_pg_source
|
source: my_pg_source
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -77,17 +76,17 @@ The following guidelines apply to tool names:
|
|||||||
to a function) until they can be validated through extensive testing to ensure
|
to a function) until they can be validated through extensive testing to ensure
|
||||||
they do not negatively impact agent's performances.
|
they do not negatively impact agent's performances.
|
||||||
|
|
||||||
#### Tool Type
|
#### Tool Kind
|
||||||
|
|
||||||
Tool type serves as a category or type that a user can assign to a tool.
|
Tool kind serves as a category or type that a user can assign to a tool.
|
||||||
|
|
||||||
The following guidelines apply to tool types:
|
The following guidelines apply to tool kinds:
|
||||||
|
|
||||||
* Should use hyphens over underscores (e.g. `firestore-list-collections` or
|
* Should user hyphens over underscores (e.g. `firestore-list-collections` or
|
||||||
`firestore_list_colelctions`).
|
`firestore_list_colelctions`).
|
||||||
* Should use product name in name (e.g. `firestore-list-collections` over
|
* Should use product name in name (e.g. `firestore-list-collections` over
|
||||||
`list-collections`).
|
`list-collections`).
|
||||||
* Changes to tool type are breaking changes and should be avoided.
|
* Changes to tool kind are breaking changes and should be avoided.
|
||||||
|
|
||||||
## Testing
|
## Testing
|
||||||
|
|
||||||
|
|||||||
32
README.md
32
README.md
@@ -2,8 +2,6 @@
|
|||||||
|
|
||||||
# MCP Toolbox for Databases
|
# MCP Toolbox for Databases
|
||||||
|
|
||||||
<a href="https://trendshift.io/repositories/13019" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13019" alt="googleapis%2Fgenai-toolbox | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
|
||||||
|
|
||||||
[](https://googleapis.github.io/genai-toolbox/)
|
[](https://googleapis.github.io/genai-toolbox/)
|
||||||
[](https://discord.gg/Dmm69peqjh)
|
[](https://discord.gg/Dmm69peqjh)
|
||||||
[](https://medium.com/@mcp_toolbox)
|
[](https://medium.com/@mcp_toolbox)
|
||||||
@@ -107,7 +105,7 @@ redeploying your application.
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### Quickstart: Running Toolbox using NPX
|
### (Non-production) Running Toolbox
|
||||||
|
|
||||||
You can run Toolbox directly with a [configuration file](#configuration):
|
You can run Toolbox directly with a [configuration file](#configuration):
|
||||||
|
|
||||||
@@ -142,7 +140,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.26.0
|
> export VERSION=0.25.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -155,7 +153,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.26.0
|
> export VERSION=0.25.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -168,7 +166,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.26.0
|
> export VERSION=0.25.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -181,7 +179,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```cmd
|
> ```cmd
|
||||||
> :: see releases page for other versions
|
> :: see releases page for other versions
|
||||||
> set VERSION=0.26.0
|
> set VERSION=0.25.0
|
||||||
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||||
> ```
|
> ```
|
||||||
>
|
>
|
||||||
@@ -193,7 +191,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```powershell
|
> ```powershell
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> $VERSION = "0.26.0"
|
> $VERSION = "0.25.0"
|
||||||
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||||
> ```
|
> ```
|
||||||
>
|
>
|
||||||
@@ -206,7 +204,7 @@ You can also install Toolbox as a container:
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -230,7 +228,7 @@ To install from source, ensure you have the latest version of
|
|||||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install github.com/googleapis/genai-toolbox@v0.26.0
|
go install github.com/googleapis/genai-toolbox@v0.25.0
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
@@ -940,9 +938,9 @@ Toolbox should have access to. Most tools will have at least one source to
|
|||||||
execute against.
|
execute against.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-pg-source
|
my-pg-source:
|
||||||
type: postgres
|
kind: postgres
|
||||||
host: 127.0.0.1
|
host: 127.0.0.1
|
||||||
port: 5432
|
port: 5432
|
||||||
database: toolbox_db
|
database: toolbox_db
|
||||||
@@ -956,12 +954,12 @@ For more details on configuring different types of sources, see the
|
|||||||
### Tools
|
### Tools
|
||||||
|
|
||||||
The `tools` section of a `tools.yaml` define the actions an agent can take: what
|
The `tools` section of a `tools.yaml` define the actions an agent can take: what
|
||||||
type of tool it is, which source(s) it affects, what parameters it uses, etc.
|
kind of tool it is, which source(s) it affects, what parameters it uses, etc.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: tools
|
tools:
|
||||||
name: search-hotels-by-name
|
search-hotels-by-name:
|
||||||
type: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on name.
|
description: Search for hotels based on name.
|
||||||
parameters:
|
parameters:
|
||||||
|
|||||||
@@ -1,253 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package internal
|
|
||||||
|
|
||||||
import (
|
|
||||||
// Import prompt packages for side effect of registration
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
|
|
||||||
|
|
||||||
// Import tool packages for side effect of registration
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
|
|
||||||
|
|
||||||
// Import source packages for side effect of registration
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/oracle"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
|
||||||
)
|
|
||||||
@@ -1,139 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package invoke
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "invoke <tool-name> [params]",
|
|
||||||
Short: "Execute a tool directly",
|
|
||||||
Long: `Execute a tool directly with parameters.
|
|
||||||
Params must be a JSON string.
|
|
||||||
Example:
|
|
||||||
toolbox invoke my-tool '{"param1": "value1"}'`,
|
|
||||||
Args: cobra.MinimumNArgs(1),
|
|
||||||
RunE: func(c *cobra.Command, args []string) error {
|
|
||||||
return runInvoke(c, args, opts)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
return cmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func runInvoke(cmd *cobra.Command, args []string, opts *internal.ToolboxOptions) error {
|
|
||||||
ctx, cancel := context.WithCancel(cmd.Context())
|
|
||||||
defer cancel()
|
|
||||||
|
|
||||||
ctx, shutdown, err := opts.Setup(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
_ = shutdown(ctx)
|
|
||||||
}()
|
|
||||||
|
|
||||||
_, err = opts.LoadConfig(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize Resources
|
|
||||||
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("failed to initialize resources: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
|
|
||||||
|
|
||||||
// Execute Tool
|
|
||||||
toolName := args[0]
|
|
||||||
tool, ok := resourceMgr.GetTool(toolName)
|
|
||||||
if !ok {
|
|
||||||
errMsg := fmt.Errorf("tool %q not found", toolName)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
var paramsInput string
|
|
||||||
if len(args) > 1 {
|
|
||||||
paramsInput = args[1]
|
|
||||||
}
|
|
||||||
|
|
||||||
params := make(map[string]any)
|
|
||||||
if paramsInput != "" {
|
|
||||||
if err := json.Unmarshal([]byte(paramsInput), ¶ms); err != nil {
|
|
||||||
errMsg := fmt.Errorf("params must be a valid JSON string: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("invalid parameters: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap())
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error embedding parameters: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client Auth not supported for ephemeral CLI call
|
|
||||||
requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("failed to check auth requirements: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
if requiresAuth {
|
|
||||||
errMsg := fmt.Errorf("client authorization is not supported")
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "")
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("tool execution failed: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print Result
|
|
||||||
output, err := json.MarshalIndent(result, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("failed to marshal result: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
fmt.Fprintln(opts.IOStreams.Out, string(output))
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,153 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package invoke
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
func invokeCommand(args []string) (string, error) {
|
|
||||||
parentCmd := &cobra.Command{Use: "toolbox"}
|
|
||||||
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
|
|
||||||
internal.PersistentFlags(parentCmd, opts)
|
|
||||||
|
|
||||||
cmd := NewCommand(opts)
|
|
||||||
parentCmd.AddCommand(cmd)
|
|
||||||
parentCmd.SetArgs(args)
|
|
||||||
|
|
||||||
err := parentCmd.Execute()
|
|
||||||
return buf.String(), err
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInvokeTool(t *testing.T) {
|
|
||||||
// Create a temporary tools file
|
|
||||||
tmpDir := t.TempDir()
|
|
||||||
|
|
||||||
toolsFileContent := `
|
|
||||||
sources:
|
|
||||||
my-sqlite:
|
|
||||||
kind: sqlite
|
|
||||||
database: test.db
|
|
||||||
tools:
|
|
||||||
hello-sqlite:
|
|
||||||
kind: sqlite-sql
|
|
||||||
source: my-sqlite
|
|
||||||
description: "hello tool"
|
|
||||||
statement: "SELECT 'hello' as greeting"
|
|
||||||
echo-tool:
|
|
||||||
kind: sqlite-sql
|
|
||||||
source: my-sqlite
|
|
||||||
description: "echo tool"
|
|
||||||
statement: "SELECT ? as msg"
|
|
||||||
parameters:
|
|
||||||
- name: message
|
|
||||||
type: string
|
|
||||||
description: message to echo
|
|
||||||
`
|
|
||||||
|
|
||||||
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
|
|
||||||
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
|
|
||||||
t.Fatalf("failed to write tools file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
tcs := []struct {
|
|
||||||
desc string
|
|
||||||
args []string
|
|
||||||
want string
|
|
||||||
wantErr bool
|
|
||||||
errStr string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "success - basic tool call",
|
|
||||||
args: []string{"invoke", "hello-sqlite", "--tools-file", toolsFilePath},
|
|
||||||
want: `"greeting": "hello"`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "success - tool call with parameters",
|
|
||||||
args: []string{"invoke", "echo-tool", `{"message": "world"}`, "--tools-file", toolsFilePath},
|
|
||||||
want: `"msg": "world"`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "error - tool not found",
|
|
||||||
args: []string{"invoke", "non-existent", "--tools-file", toolsFilePath},
|
|
||||||
wantErr: true,
|
|
||||||
errStr: `tool "non-existent" not found`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "error - invalid JSON params",
|
|
||||||
args: []string{"invoke", "echo-tool", `invalid-json`, "--tools-file", toolsFilePath},
|
|
||||||
wantErr: true,
|
|
||||||
errStr: `params must be a valid JSON string`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tc := range tcs {
|
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
|
||||||
got, err := invokeCommand(tc.args)
|
|
||||||
if (err != nil) != tc.wantErr {
|
|
||||||
t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
|
|
||||||
}
|
|
||||||
if tc.wantErr && !strings.Contains(err.Error(), tc.errStr) {
|
|
||||||
t.Fatalf("got error %v, want error containing %q", err, tc.errStr)
|
|
||||||
}
|
|
||||||
if !tc.wantErr && !strings.Contains(got, tc.want) {
|
|
||||||
t.Fatalf("got %q, want it to contain %q", got, tc.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInvokeTool_AuthUnsupported(t *testing.T) {
|
|
||||||
tmpDir := t.TempDir()
|
|
||||||
toolsFileContent := `
|
|
||||||
sources:
|
|
||||||
my-bq:
|
|
||||||
kind: bigquery
|
|
||||||
project: my-project
|
|
||||||
useClientOAuth: true
|
|
||||||
tools:
|
|
||||||
bq-tool:
|
|
||||||
kind: bigquery-sql
|
|
||||||
source: my-bq
|
|
||||||
description: "bq tool"
|
|
||||||
statement: "SELECT 1"
|
|
||||||
`
|
|
||||||
toolsFilePath := filepath.Join(tmpDir, "auth_tools.yaml")
|
|
||||||
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
|
|
||||||
t.Fatalf("failed to write tools file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath}
|
|
||||||
_, err := invokeCommand(args)
|
|
||||||
if err == nil {
|
|
||||||
t.Fatal("expected error for tool requiring client auth, but got nil")
|
|
||||||
}
|
|
||||||
if !strings.Contains(err.Error(), "client authorization is not supported") {
|
|
||||||
t.Fatalf("unexpected error message: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,251 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package internal
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/log"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
|
||||||
)
|
|
||||||
|
|
||||||
type IOStreams struct {
|
|
||||||
In io.Reader
|
|
||||||
Out io.Writer
|
|
||||||
ErrOut io.Writer
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToolboxOptions holds dependencies shared by all commands.
|
|
||||||
type ToolboxOptions struct {
|
|
||||||
IOStreams IOStreams
|
|
||||||
Logger log.Logger
|
|
||||||
Cfg server.ServerConfig
|
|
||||||
ToolsFile string
|
|
||||||
ToolsFiles []string
|
|
||||||
ToolsFolder string
|
|
||||||
PrebuiltConfigs []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Option defines a function that modifies the ToolboxOptions struct.
|
|
||||||
type Option func(*ToolboxOptions)
|
|
||||||
|
|
||||||
// NewToolboxOptions creates a new instance with defaults, then applies any
|
|
||||||
// provided options.
|
|
||||||
func NewToolboxOptions(opts ...Option) *ToolboxOptions {
|
|
||||||
o := &ToolboxOptions{
|
|
||||||
IOStreams: IOStreams{
|
|
||||||
In: os.Stdin,
|
|
||||||
Out: os.Stdout,
|
|
||||||
ErrOut: os.Stderr,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, opt := range opts {
|
|
||||||
opt(o)
|
|
||||||
}
|
|
||||||
return o
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply allows you to update an EXISTING ToolboxOptions instance.
|
|
||||||
// This is useful for "late binding".
|
|
||||||
func (o *ToolboxOptions) Apply(opts ...Option) {
|
|
||||||
for _, opt := range opts {
|
|
||||||
opt(o)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// WithIOStreams updates the IO streams.
|
|
||||||
func WithIOStreams(out, err io.Writer) Option {
|
|
||||||
return func(o *ToolboxOptions) {
|
|
||||||
o.IOStreams.Out = out
|
|
||||||
o.IOStreams.ErrOut = err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Setup create logger and telemetry instrumentations.
|
|
||||||
func (opts *ToolboxOptions) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) {
|
|
||||||
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to
|
|
||||||
// errStream
|
|
||||||
loggerOut := opts.IOStreams.Out
|
|
||||||
if opts.Cfg.Stdio {
|
|
||||||
loggerOut = opts.IOStreams.ErrOut
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle logger separately from config
|
|
||||||
logger, err := log.NewLogger(opts.Cfg.LoggingFormat.String(), opts.Cfg.LogLevel.String(), loggerOut, opts.IOStreams.ErrOut)
|
|
||||||
if err != nil {
|
|
||||||
return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx = util.WithLogger(ctx, logger)
|
|
||||||
opts.Logger = logger
|
|
||||||
|
|
||||||
// Set up OpenTelemetry
|
|
||||||
otelShutdown, err := telemetry.SetupOTel(ctx, opts.Cfg.Version, opts.Cfg.TelemetryOTLP, opts.Cfg.TelemetryGCP, opts.Cfg.TelemetryServiceName)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return ctx, nil, errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
shutdownFunc := func(ctx context.Context) error {
|
|
||||||
err := otelShutdown(ctx)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(opts.Cfg.Version)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return ctx, shutdownFunc, errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
|
||||||
|
|
||||||
return ctx, shutdownFunc, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// LoadConfig checks and merge files that should be loaded into the server
|
|
||||||
func (opts *ToolboxOptions) LoadConfig(ctx context.Context) (bool, error) {
|
|
||||||
// Determine if Custom Files should be loaded
|
|
||||||
// Check for explicit custom flags
|
|
||||||
isCustomConfigured := opts.ToolsFile != "" || len(opts.ToolsFiles) > 0 || opts.ToolsFolder != ""
|
|
||||||
|
|
||||||
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
|
|
||||||
useDefaultToolsFile := len(opts.PrebuiltConfigs) == 0 && !isCustomConfigured
|
|
||||||
|
|
||||||
if useDefaultToolsFile {
|
|
||||||
opts.ToolsFile = "tools.yaml"
|
|
||||||
isCustomConfigured = true
|
|
||||||
}
|
|
||||||
|
|
||||||
logger, err := util.LoggerFromContext(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return isCustomConfigured, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var allToolsFiles []ToolsFile
|
|
||||||
|
|
||||||
// Load Prebuilt Configuration
|
|
||||||
|
|
||||||
if len(opts.PrebuiltConfigs) > 0 {
|
|
||||||
slices.Sort(opts.PrebuiltConfigs)
|
|
||||||
sourcesList := strings.Join(opts.PrebuiltConfigs, ", ")
|
|
||||||
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
|
|
||||||
logger.InfoContext(ctx, logMsg)
|
|
||||||
|
|
||||||
for _, configName := range opts.PrebuiltConfigs {
|
|
||||||
buf, err := prebuiltconfigs.Get(configName)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorContext(ctx, err.Error())
|
|
||||||
return isCustomConfigured, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse into ToolsFile struct
|
|
||||||
parsed, err := parseToolsFile(ctx, buf)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return isCustomConfigured, errMsg
|
|
||||||
}
|
|
||||||
allToolsFiles = append(allToolsFiles, parsed)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load Custom Configurations
|
|
||||||
if isCustomConfigured {
|
|
||||||
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
|
|
||||||
if (opts.ToolsFile != "" && len(opts.ToolsFiles) > 0) ||
|
|
||||||
(opts.ToolsFile != "" && opts.ToolsFolder != "") ||
|
|
||||||
(len(opts.ToolsFiles) > 0 && opts.ToolsFolder != "") {
|
|
||||||
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return isCustomConfigured, errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
var customTools ToolsFile
|
|
||||||
var err error
|
|
||||||
|
|
||||||
if len(opts.ToolsFiles) > 0 {
|
|
||||||
// Use tools-files
|
|
||||||
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(opts.ToolsFiles)))
|
|
||||||
customTools, err = LoadAndMergeToolsFiles(ctx, opts.ToolsFiles)
|
|
||||||
} else if opts.ToolsFolder != "" {
|
|
||||||
// Use tools-folder
|
|
||||||
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", opts.ToolsFolder))
|
|
||||||
customTools, err = LoadAndMergeToolsFolder(ctx, opts.ToolsFolder)
|
|
||||||
} else {
|
|
||||||
// Use single file (tools-file or default `tools.yaml`)
|
|
||||||
buf, readFileErr := os.ReadFile(opts.ToolsFile)
|
|
||||||
if readFileErr != nil {
|
|
||||||
errMsg := fmt.Errorf("unable to read tool file at %q: %w", opts.ToolsFile, readFileErr)
|
|
||||||
logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return isCustomConfigured, errMsg
|
|
||||||
}
|
|
||||||
customTools, err = parseToolsFile(ctx, buf)
|
|
||||||
if err != nil {
|
|
||||||
err = fmt.Errorf("unable to parse tool file at %q: %w", opts.ToolsFile, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorContext(ctx, err.Error())
|
|
||||||
return isCustomConfigured, err
|
|
||||||
}
|
|
||||||
allToolsFiles = append(allToolsFiles, customTools)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Modify version string based on loaded configurations
|
|
||||||
if len(opts.PrebuiltConfigs) > 0 {
|
|
||||||
tag := "prebuilt"
|
|
||||||
if isCustomConfigured {
|
|
||||||
tag = "custom"
|
|
||||||
}
|
|
||||||
// prebuiltConfigs is already sorted above
|
|
||||||
for _, configName := range opts.PrebuiltConfigs {
|
|
||||||
opts.Cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge Everything
|
|
||||||
// This will error if custom tools collide with prebuilt tools
|
|
||||||
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
|
||||||
if err != nil {
|
|
||||||
logger.ErrorContext(ctx, err.Error())
|
|
||||||
return isCustomConfigured, err
|
|
||||||
}
|
|
||||||
|
|
||||||
opts.Cfg.SourceConfigs = finalToolsFile.Sources
|
|
||||||
opts.Cfg.AuthServiceConfigs = finalToolsFile.AuthServices
|
|
||||||
opts.Cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
|
|
||||||
opts.Cfg.ToolConfigs = finalToolsFile.Tools
|
|
||||||
opts.Cfg.ToolsetConfigs = finalToolsFile.Toolsets
|
|
||||||
opts.Cfg.PromptConfigs = finalToolsFile.Prompts
|
|
||||||
|
|
||||||
return isCustomConfigured, nil
|
|
||||||
}
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package internal
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
// PersistentFlags sets up flags that are available for all commands and
|
|
||||||
// subcommands
|
|
||||||
// It is also used to set up persistent flags during subcommand unit tests
|
|
||||||
func PersistentFlags(parentCmd *cobra.Command, opts *ToolboxOptions) {
|
|
||||||
persistentFlags := parentCmd.PersistentFlags()
|
|
||||||
|
|
||||||
persistentFlags.StringVar(&opts.ToolsFile, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
|
|
||||||
persistentFlags.StringSliceVar(&opts.ToolsFiles, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
|
|
||||||
persistentFlags.StringVar(&opts.ToolsFolder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
|
|
||||||
persistentFlags.Var(&opts.Cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
|
|
||||||
persistentFlags.Var(&opts.Cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
|
|
||||||
persistentFlags.BoolVar(&opts.Cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
|
||||||
persistentFlags.StringVar(&opts.Cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
|
|
||||||
persistentFlags.StringVar(&opts.Cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
|
||||||
// Fetch prebuilt tools sources to customize the help description
|
|
||||||
prebuiltHelp := fmt.Sprintf(
|
|
||||||
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
|
|
||||||
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
|
||||||
)
|
|
||||||
persistentFlags.StringSliceVar(&opts.PrebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
|
|
||||||
persistentFlags.StringSliceVar(&opts.Cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
|
|
||||||
}
|
|
||||||
@@ -1,214 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package skills
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"sort"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
// skillsCmd is the command for generating skills.
|
|
||||||
type skillsCmd struct {
|
|
||||||
*cobra.Command
|
|
||||||
name string
|
|
||||||
description string
|
|
||||||
toolset string
|
|
||||||
outputDir string
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewCommand creates a new Command.
|
|
||||||
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
|
|
||||||
cmd := &skillsCmd{}
|
|
||||||
cmd.Command = &cobra.Command{
|
|
||||||
Use: "skills-generate",
|
|
||||||
Short: "Generate skills from tool configurations",
|
|
||||||
RunE: func(c *cobra.Command, args []string) error {
|
|
||||||
return run(cmd, opts)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.Flags().StringVar(&cmd.name, "name", "", "Name of the generated skill.")
|
|
||||||
cmd.Flags().StringVar(&cmd.description, "description", "", "Description of the generated skill")
|
|
||||||
cmd.Flags().StringVar(&cmd.toolset, "toolset", "", "Name of the toolset to convert into a skill. If not provided, all tools will be included.")
|
|
||||||
cmd.Flags().StringVar(&cmd.outputDir, "output-dir", "skills", "Directory to output generated skills")
|
|
||||||
|
|
||||||
_ = cmd.MarkFlagRequired("name")
|
|
||||||
_ = cmd.MarkFlagRequired("description")
|
|
||||||
return cmd.Command
|
|
||||||
}
|
|
||||||
|
|
||||||
func run(cmd *skillsCmd, opts *internal.ToolboxOptions) error {
|
|
||||||
ctx, cancel := context.WithCancel(cmd.Context())
|
|
||||||
defer cancel()
|
|
||||||
|
|
||||||
ctx, shutdown, err := opts.Setup(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer func() {
|
|
||||||
_ = shutdown(ctx)
|
|
||||||
}()
|
|
||||||
|
|
||||||
_, err = opts.LoadConfig(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.MkdirAll(cmd.outputDir, 0755); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error creating output directory: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
opts.Logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", cmd.name))
|
|
||||||
|
|
||||||
// Initialize toolbox and collect tools
|
|
||||||
allTools, err := cmd.collectTools(ctx, opts)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error collecting tools: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(allTools) == 0 {
|
|
||||||
opts.Logger.InfoContext(ctx, "No tools found to generate.")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate the combined skill directory
|
|
||||||
skillPath := filepath.Join(cmd.outputDir, cmd.name)
|
|
||||||
if err := os.MkdirAll(skillPath, 0755); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error creating skill directory: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate assets directory
|
|
||||||
assetsPath := filepath.Join(skillPath, "assets")
|
|
||||||
if err := os.MkdirAll(assetsPath, 0755); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error creating assets dir: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate scripts directory
|
|
||||||
scriptsPath := filepath.Join(skillPath, "scripts")
|
|
||||||
if err := os.MkdirAll(scriptsPath, 0755); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error creating scripts dir: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Iterate over keys to ensure deterministic order
|
|
||||||
var toolNames []string
|
|
||||||
for name := range allTools {
|
|
||||||
toolNames = append(toolNames, name)
|
|
||||||
}
|
|
||||||
sort.Strings(toolNames)
|
|
||||||
|
|
||||||
for _, toolName := range toolNames {
|
|
||||||
// Generate YAML config in asset directory
|
|
||||||
minimizedContent, err := generateToolConfigYAML(opts.Cfg, toolName)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error generating filtered config for %s: %w", toolName, err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
specificToolsFileName := fmt.Sprintf("%s.yaml", toolName)
|
|
||||||
if minimizedContent != nil {
|
|
||||||
destPath := filepath.Join(assetsPath, specificToolsFileName)
|
|
||||||
if err := os.WriteFile(destPath, minimizedContent, 0644); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error writing filtered config for %s: %w", toolName, err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate wrapper script in scripts directory
|
|
||||||
scriptContent, err := generateScriptContent(toolName, specificToolsFileName)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error generating script content for %s: %w", toolName, err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
scriptFilename := filepath.Join(scriptsPath, fmt.Sprintf("%s.js", toolName))
|
|
||||||
if err := os.WriteFile(scriptFilename, []byte(scriptContent), 0755); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error writing script %s: %w", scriptFilename, err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate SKILL.md
|
|
||||||
skillContent, err := generateSkillMarkdown(cmd.name, cmd.description, allTools)
|
|
||||||
if err != nil {
|
|
||||||
errMsg := fmt.Errorf("error generating SKILL.md content: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
skillMdPath := filepath.Join(skillPath, "SKILL.md")
|
|
||||||
if err := os.WriteFile(skillMdPath, []byte(skillContent), 0644); err != nil {
|
|
||||||
errMsg := fmt.Errorf("error writing SKILL.md: %w", err)
|
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
|
||||||
return errMsg
|
|
||||||
}
|
|
||||||
|
|
||||||
opts.Logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", cmd.name, len(allTools)))
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *skillsCmd) collectTools(ctx context.Context, opts *internal.ToolboxOptions) (map[string]tools.Tool, error) {
|
|
||||||
// Initialize Resources
|
|
||||||
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to initialize resources: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
|
|
||||||
|
|
||||||
result := make(map[string]tools.Tool)
|
|
||||||
|
|
||||||
if c.toolset == "" {
|
|
||||||
return toolsMap, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
ts, ok := resourceMgr.GetToolset(c.toolset)
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("toolset %q not found", c.toolset)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, t := range ts.Tools {
|
|
||||||
if t != nil {
|
|
||||||
tool := *t
|
|
||||||
result[tool.McpManifest().Name] = tool
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
@@ -1,195 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package skills
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
func invokeCommand(args []string) (string, error) {
|
|
||||||
parentCmd := &cobra.Command{Use: "toolbox"}
|
|
||||||
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
|
|
||||||
internal.PersistentFlags(parentCmd, opts)
|
|
||||||
|
|
||||||
cmd := NewCommand(opts)
|
|
||||||
parentCmd.AddCommand(cmd)
|
|
||||||
parentCmd.SetArgs(args)
|
|
||||||
|
|
||||||
err := parentCmd.Execute()
|
|
||||||
return buf.String(), err
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGenerateSkill(t *testing.T) {
|
|
||||||
// Create a temporary directory for tests
|
|
||||||
tmpDir := t.TempDir()
|
|
||||||
outputDir := filepath.Join(tmpDir, "skills")
|
|
||||||
|
|
||||||
// Create a tools.yaml file with a sqlite tool
|
|
||||||
toolsFileContent := `
|
|
||||||
sources:
|
|
||||||
my-sqlite:
|
|
||||||
kind: sqlite
|
|
||||||
database: test.db
|
|
||||||
tools:
|
|
||||||
hello-sqlite:
|
|
||||||
kind: sqlite-sql
|
|
||||||
source: my-sqlite
|
|
||||||
description: "hello tool"
|
|
||||||
statement: "SELECT 'hello' as greeting"
|
|
||||||
`
|
|
||||||
|
|
||||||
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
|
|
||||||
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
|
|
||||||
t.Fatalf("failed to write tools file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
args := []string{
|
|
||||||
"skills-generate",
|
|
||||||
"--tools-file", toolsFilePath,
|
|
||||||
"--output-dir", outputDir,
|
|
||||||
"--name", "hello-sqlite",
|
|
||||||
"--description", "hello tool",
|
|
||||||
}
|
|
||||||
|
|
||||||
got, err := invokeCommand(args)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("command failed: %v\nOutput: %s", err, got)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify generated directory structure
|
|
||||||
skillPath := filepath.Join(outputDir, "hello-sqlite")
|
|
||||||
if _, err := os.Stat(skillPath); os.IsNotExist(err) {
|
|
||||||
t.Fatalf("skill directory not created: %s", skillPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check SKILL.md
|
|
||||||
skillMarkdown := filepath.Join(skillPath, "SKILL.md")
|
|
||||||
content, err := os.ReadFile(skillMarkdown)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("failed to read SKILL.md: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
expectedFrontmatter := `---
|
|
||||||
name: hello-sqlite
|
|
||||||
description: hello tool
|
|
||||||
---`
|
|
||||||
if !strings.HasPrefix(string(content), expectedFrontmatter) {
|
|
||||||
t.Errorf("SKILL.md does not have expected frontmatter format.\nExpected prefix:\n%s\nGot:\n%s", expectedFrontmatter, string(content))
|
|
||||||
}
|
|
||||||
|
|
||||||
if !strings.Contains(string(content), "## Usage") {
|
|
||||||
t.Errorf("SKILL.md does not contain '## Usage' section")
|
|
||||||
}
|
|
||||||
|
|
||||||
if !strings.Contains(string(content), "## Scripts") {
|
|
||||||
t.Errorf("SKILL.md does not contain '## Scripts' section")
|
|
||||||
}
|
|
||||||
|
|
||||||
if !strings.Contains(string(content), "### hello-sqlite") {
|
|
||||||
t.Errorf("SKILL.md does not contain '### hello-sqlite' tool header")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check script file
|
|
||||||
scriptFilename := "hello-sqlite.js"
|
|
||||||
scriptPath := filepath.Join(skillPath, "scripts", scriptFilename)
|
|
||||||
if _, err := os.Stat(scriptPath); os.IsNotExist(err) {
|
|
||||||
t.Fatalf("script file not created: %s", scriptPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
scriptContent, err := os.ReadFile(scriptPath)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("failed to read script file: %v", err)
|
|
||||||
}
|
|
||||||
if !strings.Contains(string(scriptContent), "hello-sqlite") {
|
|
||||||
t.Errorf("script file does not contain expected tool name")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check assets
|
|
||||||
assetPath := filepath.Join(skillPath, "assets", "hello-sqlite.yaml")
|
|
||||||
if _, err := os.Stat(assetPath); os.IsNotExist(err) {
|
|
||||||
t.Fatalf("asset file not created: %s", assetPath)
|
|
||||||
}
|
|
||||||
assetContent, err := os.ReadFile(assetPath)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("failed to read asset file: %v", err)
|
|
||||||
}
|
|
||||||
if !strings.Contains(string(assetContent), "hello-sqlite") {
|
|
||||||
t.Errorf("asset file does not contain expected tool name")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGenerateSkill_NoConfig(t *testing.T) {
|
|
||||||
tmpDir := t.TempDir()
|
|
||||||
outputDir := filepath.Join(tmpDir, "skills")
|
|
||||||
|
|
||||||
args := []string{
|
|
||||||
"skills-generate",
|
|
||||||
"--output-dir", outputDir,
|
|
||||||
"--name", "test",
|
|
||||||
"--description", "test",
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := invokeCommand(args)
|
|
||||||
if err == nil {
|
|
||||||
t.Fatal("expected command to fail when no configuration is provided and tools.yaml is missing")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Should not have created the directory if no config was processed
|
|
||||||
if _, err := os.Stat(outputDir); !os.IsNotExist(err) {
|
|
||||||
t.Errorf("output directory should not have been created")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGenerateSkill_MissingArguments(t *testing.T) {
|
|
||||||
tmpDir := t.TempDir()
|
|
||||||
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
|
|
||||||
if err := os.WriteFile(toolsFilePath, []byte("tools: {}"), 0644); err != nil {
|
|
||||||
t.Fatalf("failed to write tools file: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
args []string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "missing name",
|
|
||||||
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--description", "test"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "missing description",
|
|
||||||
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--name", "test"},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tt := range tests {
|
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
|
||||||
got, err := invokeCommand(tt.args)
|
|
||||||
if err == nil {
|
|
||||||
t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,296 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package skills
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"text/template"
|
|
||||||
|
|
||||||
"github.com/goccy/go-yaml"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
|
||||||
)
|
|
||||||
|
|
||||||
// skillTemplate is the text/template source for a skill's SKILL.md file:
// YAML front matter carrying the skill name/description, generic Node.js
// usage instructions, and one "### <tool name>" section per tool. Backtick
// characters are spliced in via string concatenation because the template
// body itself is a raw (backtick) string literal.
const skillTemplate = `---
name: {{.SkillName}}
description: {{.SkillDescription}}
---

## Usage

All scripts can be executed using Node.js. Replace ` + "`" + `<param_name>` + "`" + ` and ` + "`" + `<param_value>` + "`" + ` with actual values.

**Bash:**
` + "`" + `node scripts/<script_name>.js '{"<param_name>": "<param_value>"}'` + "`" + `

**PowerShell:**
` + "`" + `node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'` + "`" + `

## Scripts

{{range .Tools}}
### {{.Name}}

{{.Description}}

{{.ParametersSchema}}

---
{{end}}
`

// toolTemplateData is the per-tool payload rendered by skillTemplate.
type toolTemplateData struct {
	Name             string // tool name; used as the "### <name>" heading
	Description      string // description taken from the tool's manifest
	ParametersSchema string // pre-rendered Markdown/JSON parameter schema (may be empty)
}

// skillTemplateData is the root payload rendered by skillTemplate.
type skillTemplateData struct {
	SkillName        string
	SkillDescription string
	Tools            []toolTemplateData
}
|
|
||||||
|
|
||||||
// generateSkillMarkdown generates the content of the SKILL.md file.
|
|
||||||
// It includes usage instructions and a reference section for each tool in the skill,
|
|
||||||
// detailing its description and parameters.
|
|
||||||
func generateSkillMarkdown(skillName, skillDescription string, toolsMap map[string]tools.Tool) (string, error) {
|
|
||||||
var toolsData []toolTemplateData
|
|
||||||
|
|
||||||
// Order tools based on name
|
|
||||||
var toolNames []string
|
|
||||||
for name := range toolsMap {
|
|
||||||
toolNames = append(toolNames, name)
|
|
||||||
}
|
|
||||||
sort.Strings(toolNames)
|
|
||||||
|
|
||||||
for _, name := range toolNames {
|
|
||||||
tool := toolsMap[name]
|
|
||||||
manifest := tool.Manifest()
|
|
||||||
|
|
||||||
parametersSchema, err := formatParameters(manifest.Parameters)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
toolsData = append(toolsData, toolTemplateData{
|
|
||||||
Name: name,
|
|
||||||
Description: manifest.Description,
|
|
||||||
ParametersSchema: parametersSchema,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
data := skillTemplateData{
|
|
||||||
SkillName: skillName,
|
|
||||||
SkillDescription: skillDescription,
|
|
||||||
Tools: toolsData,
|
|
||||||
}
|
|
||||||
|
|
||||||
tmpl, err := template.New("markdown").Parse(skillTemplate)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("error parsing markdown template: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var buf strings.Builder
|
|
||||||
if err := tmpl.Execute(&buf, data); err != nil {
|
|
||||||
return "", fmt.Errorf("error executing markdown template: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf.String(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// nodeScriptTemplate is the text/template source for the Node.js wrapper
// script generated per tool. The rendered script locates the toolbox binary
// (first on PATH, then at a repo-relative fallback), prepends --tools-file
// pointing at the bundled asset when ToolsFileName is set, and spawns
// `toolbox … invoke <tool> <cli args>` with inherited stdio. Note: comments
// cannot be added inside the raw string — its bytes are the script's content.
const nodeScriptTemplate = `#!/usr/bin/env node

const { spawn, execSync } = require('child_process');
const path = require('path');
const fs = require('fs');

const toolName = "{{.Name}}";
const toolsFileName = "{{.ToolsFileName}}";

function getToolboxPath() {
  try {
    const checkCommand = process.platform === 'win32' ? 'where toolbox' : 'which toolbox';
    const globalPath = execSync(checkCommand, { stdio: 'pipe', encoding: 'utf-8' }).trim();
    if (globalPath) {
      return globalPath.split('\n')[0].trim();
    }
  } catch (e) {
    // Ignore error;
  }
  const localPath = path.resolve(__dirname, '../../../toolbox');
  if (fs.existsSync(localPath)) {
    return localPath;
  }
  throw new Error("Toolbox binary not found");
}

let toolboxBinary;
try {
  toolboxBinary = getToolboxPath();
} catch (err) {
  console.error("Error:", err.message);
  process.exit(1);
}

let configArgs = [];
if (toolsFileName) {
  configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));
}

const args = process.argv.slice(2);
const toolboxArgs = [...configArgs, "invoke", toolName, ...args];

const child = spawn(toolboxBinary, toolboxArgs, { stdio: 'inherit' });

child.on('close', (code) => {
  process.exit(code);
});

child.on('error', (err) => {
  console.error("Error executing toolbox:", err);
  process.exit(1);
});
`

// scriptData is the payload rendered into nodeScriptTemplate.
type scriptData struct {
	Name          string // tool name the wrapper invokes
	ToolsFileName string // bundled tools file under assets/; empty means none
}
|
|
||||||
|
|
||||||
// generateScriptContent creates the content for a Node.js wrapper script.
|
|
||||||
// This script invokes the toolbox CLI with the appropriate configuration
|
|
||||||
// (using a generated tools file) and arguments to execute the specific tool.
|
|
||||||
func generateScriptContent(name string, toolsFileName string) (string, error) {
|
|
||||||
data := scriptData{
|
|
||||||
Name: name,
|
|
||||||
ToolsFileName: toolsFileName,
|
|
||||||
}
|
|
||||||
|
|
||||||
tmpl, err := template.New("script").Parse(nodeScriptTemplate)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("error parsing script template: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var buf strings.Builder
|
|
||||||
if err := tmpl.Execute(&buf, data); err != nil {
|
|
||||||
return "", fmt.Errorf("error executing script template: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf.String(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// formatParameters converts a list of parameter manifests into a formatted JSON schema string.
|
|
||||||
// This schema is used in the skill documentation to describe the input parameters for a tool.
|
|
||||||
func formatParameters(params []parameters.ParameterManifest) (string, error) {
|
|
||||||
if len(params) == 0 {
|
|
||||||
return "", nil
|
|
||||||
}
|
|
||||||
|
|
||||||
properties := make(map[string]interface{})
|
|
||||||
var required []string
|
|
||||||
|
|
||||||
for _, p := range params {
|
|
||||||
paramMap := map[string]interface{}{
|
|
||||||
"type": p.Type,
|
|
||||||
"description": p.Description,
|
|
||||||
}
|
|
||||||
if p.Default != nil {
|
|
||||||
paramMap["default"] = p.Default
|
|
||||||
}
|
|
||||||
properties[p.Name] = paramMap
|
|
||||||
if p.Required {
|
|
||||||
required = append(required, p.Name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
schema := map[string]interface{}{
|
|
||||||
"type": "object",
|
|
||||||
"properties": properties,
|
|
||||||
}
|
|
||||||
if len(required) > 0 {
|
|
||||||
schema["required"] = required
|
|
||||||
}
|
|
||||||
|
|
||||||
schemaJSON, err := json.MarshalIndent(schema, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("error generating parameters schema: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Sprintf("#### Parameters\n\n```json\n%s\n```", string(schemaJSON)), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// generateToolConfigYAML generates the YAML configuration for a single tool and its dependency (source).
// It extracts the relevant tool and source configurations from the server config and formats them
// into a YAML document suitable for inclusion in the skill's assets.
func generateToolConfigYAML(cfg server.ServerConfig, toolName string) ([]byte, error) {
	toolCfg, ok := cfg.ToolConfigs[toolName]
	if !ok {
		return nil, fmt.Errorf("error finding tool config: %s", toolName)
	}

	// One encoder is reused for both documents; a second Encode call appends
	// a "---"-separated YAML document to the same buffer.
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)

	// Process Tool Config
	// Wrap the config so the emitted document carries a "kind: tools"
	// discriminator alongside the inlined tool fields.
	toolWrapper := struct {
		Kind   string           `yaml:"kind"`
		Config tools.ToolConfig `yaml:",inline"`
	}{
		Kind:   "tools",
		Config: toolCfg,
	}

	if err := encoder.Encode(toolWrapper); err != nil {
		return nil, fmt.Errorf("error encoding tool config: %w", err)
	}

	// Process Source Config
	// Round-trip the tool config through YAML into a generic map so the
	// "source" field can be read without knowing the concrete config type.
	var toolMap map[string]interface{}
	b, err := yaml.Marshal(toolCfg)
	if err != nil {
		return nil, fmt.Errorf("error marshaling tool config: %w", err)
	}
	if err := yaml.Unmarshal(b, &toolMap); err != nil {
		return nil, fmt.Errorf("error unmarshaling tool config map: %w", err)
	}

	// Tools without a source (or with an empty one) emit only the first
	// document; a named but unknown source is an error.
	if sourceName, ok := toolMap["source"].(string); ok && sourceName != "" {
		sourceCfg, ok := cfg.SourceConfigs[sourceName]
		if !ok {
			return nil, fmt.Errorf("error finding source config: %s", sourceName)
		}

		sourceWrapper := struct {
			Kind   string               `yaml:"kind"`
			Config sources.SourceConfig `yaml:",inline"`
		}{
			Kind:   "sources",
			Config: sourceCfg,
		}

		if err := encoder.Encode(sourceWrapper); err != nil {
			return nil, fmt.Errorf("error encoding source config: %w", err)
		}
	}

	return buf.Bytes(), nil
}
|
|
||||||
@@ -1,347 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package skills
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
|
||||||
"go.opentelemetry.io/otel/trace"
|
|
||||||
)
|
|
||||||
|
|
||||||
// MockToolConfig is a minimal tools.ToolConfig implementation used by these
// tests to exercise YAML generation without a real tool type.
type MockToolConfig struct {
	Name       string                `yaml:"name"`
	Type       string                `yaml:"type"`
	Source     string                `yaml:"source"` // name of the source this tool depends on; empty means none
	Other      string                `yaml:"other"`  // arbitrary extra field to verify inline encoding
	Parameters parameters.Parameters `yaml:"parameters"`
}

// ToolConfigType reports the configured type string.
func (m MockToolConfig) ToolConfigType() string {
	return m.Type
}

// Initialize satisfies the tools.ToolConfig interface; the mock never builds
// a runnable tool.
func (m MockToolConfig) Initialize(map[string]sources.Source) (tools.Tool, error) {
	return nil, nil
}

// MockSourceConfig is a minimal sources.SourceConfig implementation used by
// these tests to stand in for a real data source.
type MockSourceConfig struct {
	Name             string `yaml:"name"`
	Type             string `yaml:"type"`
	ConnectionString string `yaml:"connection_string"`
}

// SourceConfigType reports the configured type string.
func (m MockSourceConfig) SourceConfigType() string {
	return m.Type
}

// Initialize satisfies the sources.SourceConfig interface; the mock never
// opens a real connection.
func (m MockSourceConfig) Initialize(context.Context, trace.Tracer) (sources.Source, error) {
	return nil, nil
}
|
|
||||||
|
|
||||||
// TestFormatParameters exercises the JSON-schema rendering of parameter
// manifests: the empty case must return "", and populated cases must contain
// the expected property, default, and required entries.
func TestFormatParameters(t *testing.T) {
	tests := []struct {
		name         string
		params       []parameters.ParameterManifest
		wantContains []string
		wantErr      bool
	}{
		{
			name:         "empty parameters",
			params:       []parameters.ParameterManifest{},
			wantContains: []string{""},
		},
		{
			name: "single required string parameter",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "A test parameter",
					Type:        "string",
					Required:    true,
				},
			},
			// "## Parameters" matches as a substring of the emitted
			// "#### Parameters" heading.
			wantContains: []string{
				"## Parameters",
				"```json",
				`"type": "object"`,
				`"properties": {`,
				`"param1": {`,
				`"type": "string"`,
				`"description": "A test parameter"`,
				`"required": [`,
				`"param1"`,
			},
		},
		{
			name: "mixed parameters with defaults",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "Param 1",
					Type:        "string",
					Required:    true,
				},
				{
					Name:        "param2",
					Description: "Param 2",
					Type:        "integer",
					Default:     42,
					Required:    false,
				},
			},
			wantContains: []string{
				`"param1": {`,
				`"param2": {`,
				`"default": 42`,
				`"required": [`,
				`"param1"`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := formatParameters(tt.params)
			if (err != nil) != tt.wantErr {
				t.Errorf("formatParameters() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			// The empty-params case is asserted exactly rather than by
			// substring matching.
			if len(tt.params) == 0 {
				if got != "" {
					t.Errorf("formatParameters() = %v, want empty string", got)
				}
				return
			}

			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("formatParameters() result missing expected string: %s\nGot:\n%s", want, got)
				}
			}
		})
	}
}
|
|
||||||
|
|
||||||
// TestGenerateSkillMarkdown checks that the rendered SKILL.md contains the
// front-matter fields, the usage instructions, and a section for the mock tool.
func TestGenerateSkillMarkdown(t *testing.T) {
	toolsMap := map[string]tools.Tool{
		"tool1": server.MockTool{
			Description: "First tool",
			Params: []parameters.Parameter{
				parameters.NewStringParameter("p1", "d1"),
			},
		},
	}

	got, err := generateSkillMarkdown("MySkill", "My Description", toolsMap)
	if err != nil {
		t.Fatalf("generateSkillMarkdown() error = %v", err)
	}

	// NOTE(review): both command-line entries below unescape to the Bash
	// form; the PowerShell line in the template keeps literal backslashes,
	// but strings.Contains matches the Bash line anyway — confirm intent.
	expectedSubstrings := []string{
		"name: MySkill",
		"description: My Description",
		"## Usage",
		"All scripts can be executed using Node.js",
		"**Bash:**",
		"`node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'`",
		"**PowerShell:**",
		"`node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'`",
		"## Scripts",
		"### tool1",
		"First tool",
		"## Parameters",
	}

	for _, s := range expectedSubstrings {
		if !strings.Contains(got, s) {
			t.Errorf("generateSkillMarkdown() missing substring %q", s)
		}
	}
}
|
|
||||||
|
|
||||||
// TestGenerateScriptContent verifies that the generated Node.js wrapper embeds
// the tool name, the (possibly empty) tools file name, and — when a tools file
// is set — the --tools-file plumbing into the assets directory.
func TestGenerateScriptContent(t *testing.T) {
	tests := []struct {
		name          string
		toolName      string
		toolsFileName string
		wantContains  []string
	}{
		{
			name:          "basic script",
			toolName:      "test-tool",
			toolsFileName: "",
			wantContains: []string{
				`const toolName = "test-tool";`,
				`const toolsFileName = "";`,
				`const toolboxArgs = [...configArgs, "invoke", toolName, ...args];`,
			},
		},
		{
			name:          "script with tools file",
			toolName:      "complex-tool",
			toolsFileName: "tools.yaml",
			wantContains: []string{
				`const toolName = "complex-tool";`,
				`const toolsFileName = "tools.yaml";`,
				`configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := generateScriptContent(tt.toolName, tt.toolsFileName)
			if err != nil {
				t.Fatalf("generateScriptContent() error = %v", err)
			}

			for _, s := range tt.wantContains {
				if !strings.Contains(got, s) {
					t.Errorf("generateScriptContent() missing substring %q\nGot:\n%s", s, got)
				}
			}
		})
	}
}
|
|
||||||
|
|
||||||
// TestGenerateToolConfigYAML verifies the per-tool YAML asset generation:
// a tool with a source yields two documents (tool + source), a source-less
// tool yields one, parameters are inlined, and unknown tools or dangling
// source references produce errors. The wantNil field exists in the table
// but no current case sets it.
func TestGenerateToolConfigYAML(t *testing.T) {
	cfg := server.ServerConfig{
		ToolConfigs: server.ToolConfigs{
			"tool1": MockToolConfig{
				Name:   "tool1",
				Type:   "custom-tool",
				Source: "src1",
				Other:  "foo",
			},
			"toolNoSource": MockToolConfig{
				Name: "toolNoSource",
				Type: "http",
			},
			"toolWithParams": MockToolConfig{
				Name: "toolWithParams",
				Type: "custom-tool",
				Parameters: []parameters.Parameter{
					parameters.NewStringParameter("param1", "desc1"),
				},
			},
			// References a source that is absent from SourceConfigs below.
			"toolWithMissingSource": MockToolConfig{
				Name:   "toolWithMissingSource",
				Type:   "custom-tool",
				Source: "missing-src",
			},
		},
		SourceConfigs: server.SourceConfigs{
			"src1": MockSourceConfig{
				Name:             "src1",
				Type:             "postgres",
				ConnectionString: "conn1",
			},
		},
	}

	tests := []struct {
		name         string
		toolName     string
		wantContains []string
		wantErr      bool
		wantNil      bool
	}{
		{
			name:     "tool with source",
			toolName: "tool1",
			// "---" asserts the tool and source are separate YAML documents.
			wantContains: []string{
				"kind: tools",
				"name: tool1",
				"type: custom-tool",
				"source: src1",
				"other: foo",
				"---",
				"kind: sources",
				"name: src1",
				"type: postgres",
				"connection_string: conn1",
			},
		},
		{
			name:     "tool without source",
			toolName: "toolNoSource",
			wantContains: []string{
				"kind: tools",
				"name: toolNoSource",
				"type: http",
			},
		},
		{
			name:     "tool with parameters",
			toolName: "toolWithParams",
			wantContains: []string{
				"kind: tools",
				"name: toolWithParams",
				"type: custom-tool",
				"parameters:",
				"- name: param1",
				"type: string",
				"description: desc1",
			},
		},
		{
			name:     "non-existent tool",
			toolName: "missing-tool",
			wantErr:  true,
		},
		{
			name:     "tool with missing source config",
			toolName: "toolWithMissingSource",
			wantErr:  true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotBytes, err := generateToolConfigYAML(cfg, tt.toolName)
			if (err != nil) != tt.wantErr {
				t.Errorf("generateToolConfigYAML() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			if tt.wantNil {
				if gotBytes != nil {
					t.Errorf("generateToolConfigYAML() expected nil, got %s", string(gotBytes))
				}
				return
			}

			got := string(gotBytes)
			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("generateToolConfigYAML() result missing expected string: %q\nGot:\n%s", want, got)
				}
			}
		})
	}
}
|
|
||||||
@@ -1,349 +0,0 @@
|
|||||||
// Copyright 2026 Google LLC
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package internal
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"regexp"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/goccy/go-yaml"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ToolsFile is the in-memory representation of a parsed tools configuration
// file: one map per resource section the server understands, each keyed by
// resource name.
type ToolsFile struct {
	Sources         server.SourceConfigs         `yaml:"sources"`
	AuthServices    server.AuthServiceConfigs    `yaml:"authServices"`
	EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
	Tools           server.ToolConfigs           `yaml:"tools"`
	Toolsets        server.ToolsetConfigs        `yaml:"toolsets"`
	Prompts         server.PromptConfigs         `yaml:"prompts"`
}
|
|
||||||
|
|
||||||
// parseEnv replaces environment variables ${ENV_NAME} with their values.
|
|
||||||
// also support ${ENV_NAME:default_value}.
|
|
||||||
func parseEnv(input string) (string, error) {
|
|
||||||
re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)
|
|
||||||
|
|
||||||
var err error
|
|
||||||
output := re.ReplaceAllStringFunc(input, func(match string) string {
|
|
||||||
parts := re.FindStringSubmatch(match)
|
|
||||||
|
|
||||||
// extract the variable name
|
|
||||||
variableName := parts[1]
|
|
||||||
if value, found := os.LookupEnv(variableName); found {
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
if len(parts) >= 4 && parts[2] != "" {
|
|
||||||
return parts[3]
|
|
||||||
}
|
|
||||||
err = fmt.Errorf("environment variable not found: %q", variableName)
|
|
||||||
return ""
|
|
||||||
})
|
|
||||||
return output, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseToolsFile parses the provided yaml into appropriate configs.
// It first expands ${ENV} placeholders, then upgrades any v1-format documents
// to the v2 multi-document layout, and finally hands the YAML to the server's
// resource unmarshaler.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
	var toolsFile ToolsFile
	// Replace environment variables if found
	output, err := parseEnv(string(raw))
	if err != nil {
		return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
	}
	raw = []byte(output)

	raw, err = convertToolsFile(raw)
	if err != nil {
		return toolsFile, fmt.Errorf("error converting tools file: %s", err)
	}

	// Parse contents
	toolsFile.Sources, toolsFile.AuthServices, toolsFile.EmbeddingModels, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
	if err != nil {
		return toolsFile, err
	}
	return toolsFile, nil
}
|
|
||||||
|
|
||||||
// convertToolsFile rewrites v1-format tools YAML into the v2 multi-document
// format. v1 groups resources under top-level keys ("sources", "tools", ...);
// v2 emits one document per resource with explicit kind/name fields.
// Documents that already look like v2 are re-encoded unchanged.
func convertToolsFile(raw []byte) ([]byte, error) {
	var input yaml.MapSlice
	// UseOrderedMap preserves key order so the emitted YAML is stable.
	decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())

	// convert to tools file v2
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)

	v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
	// Iterate every YAML document in the stream until EOF.
	for {
		if err := decoder.Decode(&input); err != nil {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		for _, item := range input {
			key, ok := item.Key.(string)
			if !ok {
				return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
			}
			// check if the key is config file v1's key
			if slices.Contains(v1keys, key) {
				// check if value conversion to yaml.MapSlice successfully
				// fields such as "tools" in toolsets might pass the first check but
				// fail to convert to MapSlice
				if slice, ok := item.Value.(yaml.MapSlice); ok {
					// Deprecated: convert authSources to authServices
					if key == "authSources" {
						key = "authServices"
					}
					transformed, err := transformDocs(key, slice)
					if err != nil {
						return nil, err
					}
					// encode per-doc
					for _, doc := range transformed {
						if err := encoder.Encode(doc); err != nil {
							return nil, err
						}
					}
				} else {
					// invalid input will be ignored
					// we don't want to throw error here since the config could
					// be valid but with a different order such as:
					// ---
					// tools:
					//   - tool_a
					// kind: toolsets
					// ---
					continue
				}
			} else {
				// this doc is already v2, encode to buf
				// (the whole document is passed through and the key loop
				// stops for this document)
				if err := encoder.Encode(input); err != nil {
					return nil, err
				}
				break
			}
		}
	}
	return buf.Bytes(), nil
}
|
|
||||||
|
|
||||||
// transformDocs transforms the configuration file from v1 format to v2.
// Each entry under a v1 top-level key becomes its own document of the shape
// {kind: <kind>, name: <entry name>, ...body}. yaml.MapSlice preserves the
// key order of the original map.
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
	var transformed []yaml.MapSlice
	for _, entry := range input {
		entryName, ok := entry.Key.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
		}
		// Toolset entries are bare lists of tool names; ProcessValue wraps
		// such a list under a "tools" key when kind == "toolsets".
		entryBody := ProcessValue(entry.Value, kind == "toolsets")

		currentTransformed := yaml.MapSlice{
			{Key: "kind", Value: kind},
			{Key: "name", Value: entryName},
		}

		// Merge the transformed body into our result
		if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
			currentTransformed = append(currentTransformed, bodySlice...)
		} else {
			return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
		}
		transformed = append(transformed, currentTransformed)
	}
	return transformed, nil
}
|
|
||||||
|
|
||||||
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
|
|
||||||
func ProcessValue(v any, isToolset bool) any {
|
|
||||||
switch val := v.(type) {
|
|
||||||
case yaml.MapSlice:
|
|
||||||
// creating a new MapSlice is safer for recursive transformation
|
|
||||||
newVal := make(yaml.MapSlice, len(val))
|
|
||||||
for i, item := range val {
|
|
||||||
// Perform renaming
|
|
||||||
if item.Key == "kind" {
|
|
||||||
item.Key = "type"
|
|
||||||
}
|
|
||||||
// Recursive call for nested values (e.g., nested objects or lists)
|
|
||||||
item.Value = ProcessValue(item.Value, false)
|
|
||||||
newVal[i] = item
|
|
||||||
}
|
|
||||||
return newVal
|
|
||||||
case []any:
|
|
||||||
// Process lists: If it's a toolset top-level list, wrap it.
|
|
||||||
if isToolset {
|
|
||||||
return yaml.MapSlice{{Key: "tools", Value: val}}
|
|
||||||
}
|
|
||||||
// Otherwise, recurse into list items (to catch nested objects)
|
|
||||||
newVal := make([]any, len(val))
|
|
||||||
for i := range val {
|
|
||||||
newVal[i] = ProcessValue(val[i], false)
|
|
||||||
}
|
|
||||||
return newVal
|
|
||||||
default:
|
|
||||||
return val
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// mergeToolsFiles merges multiple ToolsFile structs into one.
|
|
||||||
// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets.
|
|
||||||
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
|
|
||||||
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
|
||||||
merged := ToolsFile{
|
|
||||||
Sources: make(server.SourceConfigs),
|
|
||||||
AuthServices: make(server.AuthServiceConfigs),
|
|
||||||
EmbeddingModels: make(server.EmbeddingModelConfigs),
|
|
||||||
Tools: make(server.ToolConfigs),
|
|
||||||
Toolsets: make(server.ToolsetConfigs),
|
|
||||||
Prompts: make(server.PromptConfigs),
|
|
||||||
}
|
|
||||||
|
|
||||||
var conflicts []string
|
|
||||||
|
|
||||||
for fileIndex, file := range files {
|
|
||||||
// Check for conflicts and merge sources
|
|
||||||
for name, source := range file.Sources {
|
|
||||||
if _, exists := merged.Sources[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.Sources[name] = source
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for conflicts and merge authServices
|
|
||||||
for name, authService := range file.AuthServices {
|
|
||||||
if _, exists := merged.AuthServices[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.AuthServices[name] = authService
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for conflicts and merge embeddingModels
|
|
||||||
for name, em := range file.EmbeddingModels {
|
|
||||||
if _, exists := merged.EmbeddingModels[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.EmbeddingModels[name] = em
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for conflicts and merge tools
|
|
||||||
for name, tool := range file.Tools {
|
|
||||||
if _, exists := merged.Tools[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.Tools[name] = tool
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for conflicts and merge toolsets
|
|
||||||
for name, toolset := range file.Toolsets {
|
|
||||||
if _, exists := merged.Toolsets[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.Toolsets[name] = toolset
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for conflicts and merge prompts
|
|
||||||
for name, prompt := range file.Prompts {
|
|
||||||
if _, exists := merged.Prompts[name]; exists {
|
|
||||||
conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1))
|
|
||||||
} else {
|
|
||||||
merged.Prompts[name] = prompt
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If conflicts were detected, return an error
|
|
||||||
if len(conflicts) > 0 {
|
|
||||||
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
|
|
||||||
}
|
|
||||||
|
|
||||||
return merged, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// LoadAndMergeToolsFiles loads multiple YAML files and merges them
|
|
||||||
func LoadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) {
|
|
||||||
var toolsFiles []ToolsFile
|
|
||||||
|
|
||||||
for _, filePath := range filePaths {
|
|
||||||
buf, err := os.ReadFile(filePath)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
toolsFile, err := parseToolsFile(ctx, buf)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
toolsFiles = append(toolsFiles, toolsFile)
|
|
||||||
}
|
|
||||||
|
|
||||||
mergedFile, err := mergeToolsFiles(toolsFiles...)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return mergedFile, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// LoadAndMergeToolsFolder loads all YAML files from a directory and merges them
|
|
||||||
func LoadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) {
|
|
||||||
// Check if directory exists
|
|
||||||
info, err := os.Stat(folderPath)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err)
|
|
||||||
}
|
|
||||||
if !info.IsDir() {
|
|
||||||
return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find all YAML files in the directory
|
|
||||||
pattern := filepath.Join(folderPath, "*.yaml")
|
|
||||||
yamlFiles, err := filepath.Glob(pattern)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Also find .yml files
|
|
||||||
ymlPattern := filepath.Join(folderPath, "*.yml")
|
|
||||||
ymlFiles, err := filepath.Glob(ymlPattern)
|
|
||||||
if err != nil {
|
|
||||||
return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Combine both file lists
|
|
||||||
allFiles := append(yamlFiles, ymlFiles...)
|
|
||||||
|
|
||||||
if len(allFiles) == 0 {
|
|
||||||
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use existing LoadAndMergeToolsFiles function
|
|
||||||
return LoadAndMergeToolsFiles(ctx, allFiles)
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
30
cmd/options.go
Normal file
30
cmd/options.go
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
// Copyright 2024 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Option is a function that configures a Command.
|
||||||
|
type Option func(*Command)
|
||||||
|
|
||||||
|
// WithStreams overrides the default writer.
|
||||||
|
func WithStreams(out, err io.Writer) Option {
|
||||||
|
return func(c *Command) {
|
||||||
|
c.outStream = out
|
||||||
|
c.errStream = err
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -12,38 +12,57 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package internal
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"io"
|
"io"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestToolboxOptions(t *testing.T) {
|
func TestCommandOptions(t *testing.T) {
|
||||||
w := io.Discard
|
w := io.Discard
|
||||||
tcs := []struct {
|
tcs := []struct {
|
||||||
desc string
|
desc string
|
||||||
isValid func(*ToolboxOptions) error
|
isValid func(*Command) error
|
||||||
option Option
|
option Option
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
desc: "with logger",
|
desc: "with logger",
|
||||||
isValid: func(o *ToolboxOptions) error {
|
isValid: func(c *Command) error {
|
||||||
if o.IOStreams.Out != w || o.IOStreams.ErrOut != w {
|
if c.outStream != w || c.errStream != w {
|
||||||
return errors.New("loggers do not match")
|
return errors.New("loggers do not match")
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
option: WithIOStreams(w, w),
|
option: WithStreams(w, w),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tc := range tcs {
|
for _, tc := range tcs {
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
got := NewToolboxOptions(tc.option)
|
got, err := invokeProxyWithOption(tc.option)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
if err := tc.isValid(got); err != nil {
|
if err := tc.isValid(got); err != nil {
|
||||||
t.Errorf("option did not initialize command correctly: %v", err)
|
t.Errorf("option did not initialize command correctly: %v", err)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func invokeProxyWithOption(o Option) (*Command, error) {
|
||||||
|
c := NewCommand(o)
|
||||||
|
// Keep the test output quiet
|
||||||
|
c.SilenceUsage = true
|
||||||
|
c.SilenceErrors = true
|
||||||
|
// Disable execute behavior
|
||||||
|
c.RunE = func(*cobra.Command, []string) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := c.Execute()
|
||||||
|
return c, err
|
||||||
|
}
|
||||||
729
cmd/root.go
729
cmd/root.go
@@ -23,6 +23,7 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"os/signal"
|
"os/signal"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
"runtime"
|
"runtime"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
@@ -30,18 +31,248 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/fsnotify/fsnotify"
|
"github.com/fsnotify/fsnotify"
|
||||||
// Importing the cmd/internal package also import packages for side effect of registration
|
yaml "github.com/goccy/go-yaml"
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal/invoke"
|
|
||||||
"github.com/googleapis/genai-toolbox/cmd/internal/skills"
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/log"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||||
"github.com/googleapis/genai-toolbox/internal/server"
|
"github.com/googleapis/genai-toolbox/internal/server"
|
||||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
|
||||||
|
// Import prompt packages for side effect of registration
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
|
||||||
|
|
||||||
|
// Import tool packages for side effect of registration
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/oracle"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -70,74 +301,302 @@ func semanticVersion() string {
|
|||||||
return v
|
return v
|
||||||
}
|
}
|
||||||
|
|
||||||
// GenerateCommand returns a new Command object with the specified IO streams
|
|
||||||
// This is used for integration test package
|
|
||||||
func GenerateCommand(out, err io.Writer) *cobra.Command {
|
|
||||||
opts := internal.NewToolboxOptions(internal.WithIOStreams(out, err))
|
|
||||||
return NewCommand(opts)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute adds all child commands to the root command and sets flags appropriately.
|
// Execute adds all child commands to the root command and sets flags appropriately.
|
||||||
// This is called by main.main(). It only needs to happen once to the rootCmd.
|
// This is called by main.main(). It only needs to happen once to the rootCmd.
|
||||||
func Execute() {
|
func Execute() {
|
||||||
// Initialize options
|
if err := NewCommand().Execute(); err != nil {
|
||||||
opts := internal.NewToolboxOptions()
|
|
||||||
|
|
||||||
if err := NewCommand(opts).Execute(); err != nil {
|
|
||||||
exit := 1
|
exit := 1
|
||||||
os.Exit(exit)
|
os.Exit(exit)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Command represents an invocation of the CLI.
|
||||||
|
type Command struct {
|
||||||
|
*cobra.Command
|
||||||
|
|
||||||
|
cfg server.ServerConfig
|
||||||
|
logger log.Logger
|
||||||
|
tools_file string
|
||||||
|
tools_files []string
|
||||||
|
tools_folder string
|
||||||
|
prebuiltConfig string
|
||||||
|
inStream io.Reader
|
||||||
|
outStream io.Writer
|
||||||
|
errStream io.Writer
|
||||||
|
}
|
||||||
|
|
||||||
// NewCommand returns a Command object representing an invocation of the CLI.
|
// NewCommand returns a Command object representing an invocation of the CLI.
|
||||||
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
|
func NewCommand(opts ...Option) *Command {
|
||||||
cmd := &cobra.Command{
|
in := os.Stdin
|
||||||
|
out := os.Stdout
|
||||||
|
err := os.Stderr
|
||||||
|
|
||||||
|
baseCmd := &cobra.Command{
|
||||||
Use: "toolbox",
|
Use: "toolbox",
|
||||||
Version: versionString,
|
Version: versionString,
|
||||||
SilenceErrors: true,
|
SilenceErrors: true,
|
||||||
}
|
}
|
||||||
|
cmd := &Command{
|
||||||
|
Command: baseCmd,
|
||||||
|
inStream: in,
|
||||||
|
outStream: out,
|
||||||
|
errStream: err,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, o := range opts {
|
||||||
|
o(cmd)
|
||||||
|
}
|
||||||
|
|
||||||
// Do not print Usage on runtime error
|
// Do not print Usage on runtime error
|
||||||
cmd.SilenceUsage = true
|
cmd.SilenceUsage = true
|
||||||
|
|
||||||
// Set server version
|
// Set server version
|
||||||
opts.Cfg.Version = versionString
|
cmd.cfg.Version = versionString
|
||||||
|
|
||||||
// set baseCmd in, out and err the same as cmd.
|
// set baseCmd in, out and err the same as cmd.
|
||||||
cmd.SetIn(opts.IOStreams.In)
|
baseCmd.SetIn(cmd.inStream)
|
||||||
cmd.SetOut(opts.IOStreams.Out)
|
baseCmd.SetOut(cmd.outStream)
|
||||||
cmd.SetErr(opts.IOStreams.ErrOut)
|
baseCmd.SetErr(cmd.errStream)
|
||||||
|
|
||||||
// setup flags that are common across all commands
|
|
||||||
internal.PersistentFlags(cmd, opts)
|
|
||||||
|
|
||||||
flags := cmd.Flags()
|
flags := cmd.Flags()
|
||||||
|
flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
|
||||||
|
flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
|
||||||
|
|
||||||
flags.StringVarP(&opts.Cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
|
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
|
||||||
flags.IntVarP(&opts.Cfg.Port, "port", "p", 5000, "Port the server will listen on.")
|
|
||||||
|
|
||||||
flags.StringVar(&opts.ToolsFile, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
|
|
||||||
// deprecate tools_file
|
// deprecate tools_file
|
||||||
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
|
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
|
||||||
flags.BoolVar(&opts.Cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
|
||||||
flags.BoolVar(&opts.Cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
|
||||||
flags.BoolVar(&opts.Cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
|
||||||
|
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
|
||||||
|
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
|
||||||
|
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
||||||
|
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
|
||||||
|
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||||
|
// Fetch prebuilt tools sources to customize the help description
|
||||||
|
prebuiltHelp := fmt.Sprintf(
|
||||||
|
"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
|
||||||
|
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
||||||
|
)
|
||||||
|
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
|
||||||
|
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||||
|
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||||
|
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||||
// TODO: Insecure by default. Might consider updating this for v1.0.0
|
// TODO: Insecure by default. Might consider updating this for v1.0.0
|
||||||
flags.StringSliceVar(&opts.Cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
|
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
|
||||||
flags.StringSliceVar(&opts.Cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
|
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
|
||||||
|
|
||||||
// wrap RunE command so that we have access to original Command object
|
// wrap RunE command so that we have access to original Command object
|
||||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd, opts) }
|
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||||
|
|
||||||
// Register subcommands for tool invocation
|
|
||||||
cmd.AddCommand(invoke.NewCommand(opts))
|
|
||||||
// Register subcommands for skill generation
|
|
||||||
cmd.AddCommand(skills.NewCommand(opts))
|
|
||||||
|
|
||||||
return cmd
|
return cmd
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleDynamicReload(ctx context.Context, toolsFile internal.ToolsFile, s *server.Server) error {
|
type ToolsFile struct {
|
||||||
|
Sources server.SourceConfigs `yaml:"sources"`
|
||||||
|
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
|
||||||
|
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
|
||||||
|
EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
|
||||||
|
Tools server.ToolConfigs `yaml:"tools"`
|
||||||
|
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
|
||||||
|
Prompts server.PromptConfigs `yaml:"prompts"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseEnv replaces environment variables ${ENV_NAME} with their values.
|
||||||
|
// also support ${ENV_NAME:default_value}.
|
||||||
|
func parseEnv(input string) (string, error) {
|
||||||
|
re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)
|
||||||
|
|
||||||
|
var err error
|
||||||
|
output := re.ReplaceAllStringFunc(input, func(match string) string {
|
||||||
|
parts := re.FindStringSubmatch(match)
|
||||||
|
|
||||||
|
// extract the variable name
|
||||||
|
variableName := parts[1]
|
||||||
|
if value, found := os.LookupEnv(variableName); found {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
if len(parts) >= 4 && parts[2] != "" {
|
||||||
|
return parts[3]
|
||||||
|
}
|
||||||
|
err = fmt.Errorf("environment variable not found: %q", variableName)
|
||||||
|
return ""
|
||||||
|
})
|
||||||
|
return output, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseToolsFile parses the provided yaml into appropriate configs.
|
||||||
|
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
|
||||||
|
var toolsFile ToolsFile
|
||||||
|
// Replace environment variables if found
|
||||||
|
output, err := parseEnv(string(raw))
|
||||||
|
if err != nil {
|
||||||
|
return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
|
||||||
|
}
|
||||||
|
raw = []byte(output)
|
||||||
|
|
||||||
|
// Parse contents
|
||||||
|
err = yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
|
||||||
|
if err != nil {
|
||||||
|
return toolsFile, err
|
||||||
|
}
|
||||||
|
return toolsFile, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// mergeToolsFiles merges multiple ToolsFile structs into one.
|
||||||
|
// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets.
|
||||||
|
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
|
||||||
|
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
||||||
|
merged := ToolsFile{
|
||||||
|
Sources: make(server.SourceConfigs),
|
||||||
|
AuthServices: make(server.AuthServiceConfigs),
|
||||||
|
EmbeddingModels: make(server.EmbeddingModelConfigs),
|
||||||
|
Tools: make(server.ToolConfigs),
|
||||||
|
Toolsets: make(server.ToolsetConfigs),
|
||||||
|
Prompts: make(server.PromptConfigs),
|
||||||
|
}
|
||||||
|
|
||||||
|
var conflicts []string
|
||||||
|
|
||||||
|
for fileIndex, file := range files {
|
||||||
|
// Check for conflicts and merge sources
|
||||||
|
for name, source := range file.Sources {
|
||||||
|
if _, exists := merged.Sources[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.Sources[name] = source
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge authSources (deprecated, but still support)
|
||||||
|
for name, authSource := range file.AuthSources {
|
||||||
|
if _, exists := merged.AuthSources[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
if merged.AuthSources == nil {
|
||||||
|
merged.AuthSources = make(server.AuthServiceConfigs)
|
||||||
|
}
|
||||||
|
merged.AuthSources[name] = authSource
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge authServices
|
||||||
|
for name, authService := range file.AuthServices {
|
||||||
|
if _, exists := merged.AuthServices[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.AuthServices[name] = authService
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge embeddingModels
|
||||||
|
for name, em := range file.EmbeddingModels {
|
||||||
|
if _, exists := merged.EmbeddingModels[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.EmbeddingModels[name] = em
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge tools
|
||||||
|
for name, tool := range file.Tools {
|
||||||
|
if _, exists := merged.Tools[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.Tools[name] = tool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge toolsets
|
||||||
|
for name, toolset := range file.Toolsets {
|
||||||
|
if _, exists := merged.Toolsets[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.Toolsets[name] = toolset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for conflicts and merge prompts
|
||||||
|
for name, prompt := range file.Prompts {
|
||||||
|
if _, exists := merged.Prompts[name]; exists {
|
||||||
|
conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1))
|
||||||
|
} else {
|
||||||
|
merged.Prompts[name] = prompt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If conflicts were detected, return an error
|
||||||
|
if len(conflicts) > 0 {
|
||||||
|
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
|
||||||
|
}
|
||||||
|
|
||||||
|
return merged, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadAndMergeToolsFiles loads multiple YAML files and merges them
|
||||||
|
func loadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) {
|
||||||
|
var toolsFiles []ToolsFile
|
||||||
|
|
||||||
|
for _, filePath := range filePaths {
|
||||||
|
buf, err := os.ReadFile(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
toolsFile, err := parseToolsFile(ctx, buf)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
toolsFiles = append(toolsFiles, toolsFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
mergedFile, err := mergeToolsFiles(toolsFiles...)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return mergedFile, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadAndMergeToolsFolder loads all YAML files from a directory and merges them
|
||||||
|
func loadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) {
|
||||||
|
// Check if directory exists
|
||||||
|
info, err := os.Stat(folderPath)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err)
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find all YAML files in the directory
|
||||||
|
pattern := filepath.Join(folderPath, "*.yaml")
|
||||||
|
yamlFiles, err := filepath.Glob(pattern)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also find .yml files
|
||||||
|
ymlPattern := filepath.Join(folderPath, "*.yml")
|
||||||
|
ymlFiles, err := filepath.Glob(ymlPattern)
|
||||||
|
if err != nil {
|
||||||
|
return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Combine both file lists
|
||||||
|
allFiles := append(yamlFiles, ymlFiles...)
|
||||||
|
|
||||||
|
if len(allFiles) == 0 {
|
||||||
|
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use existing loadAndMergeToolsFiles function
|
||||||
|
return loadAndMergeToolsFiles(ctx, allFiles)
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Server) error {
|
||||||
logger, err := util.LoggerFromContext(ctx)
|
logger, err := util.LoggerFromContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
@@ -157,7 +616,7 @@ func handleDynamicReload(ctx context.Context, toolsFile internal.ToolsFile, s *s
|
|||||||
|
|
||||||
// validateReloadEdits checks that the reloaded tools file configs can initialized without failing
|
// validateReloadEdits checks that the reloaded tools file configs can initialized without failing
|
||||||
func validateReloadEdits(
|
func validateReloadEdits(
|
||||||
ctx context.Context, toolsFile internal.ToolsFile,
|
ctx context.Context, toolsFile ToolsFile,
|
||||||
) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
|
) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
|
||||||
) {
|
) {
|
||||||
logger, err := util.LoggerFromContext(ctx)
|
logger, err := util.LoggerFromContext(ctx)
|
||||||
@@ -283,18 +742,18 @@ func watchChanges(ctx context.Context, watchDirs map[string]bool, watchedFiles m
|
|||||||
|
|
||||||
case <-debounce.C:
|
case <-debounce.C:
|
||||||
debounce.Stop()
|
debounce.Stop()
|
||||||
var reloadedToolsFile internal.ToolsFile
|
var reloadedToolsFile ToolsFile
|
||||||
|
|
||||||
if watchingFolder {
|
if watchingFolder {
|
||||||
logger.DebugContext(ctx, "Reloading tools folder.")
|
logger.DebugContext(ctx, "Reloading tools folder.")
|
||||||
reloadedToolsFile, err = internal.LoadAndMergeToolsFolder(ctx, folderToWatch)
|
reloadedToolsFile, err = loadAndMergeToolsFolder(ctx, folderToWatch)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.WarnContext(ctx, "error loading tools folder %s", err)
|
logger.WarnContext(ctx, "error loading tools folder %s", err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
logger.DebugContext(ctx, "Reloading tools file(s).")
|
logger.DebugContext(ctx, "Reloading tools file(s).")
|
||||||
reloadedToolsFile, err = internal.LoadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles)))
|
reloadedToolsFile, err = loadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles)))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.WarnContext(ctx, "error loading tools files %s", err)
|
logger.WarnContext(ctx, "error loading tools files %s", err)
|
||||||
continue
|
continue
|
||||||
@@ -338,7 +797,7 @@ func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder str
|
|||||||
return watchDirs, watchedFiles
|
return watchDirs, watchedFiles
|
||||||
}
|
}
|
||||||
|
|
||||||
func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error {
|
func run(cmd *Command) error {
|
||||||
ctx, cancel := context.WithCancel(cmd.Context())
|
ctx, cancel := context.WithCancel(cmd.Context())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
@@ -355,40 +814,184 @@ func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error {
|
|||||||
}
|
}
|
||||||
switch s {
|
switch s {
|
||||||
case syscall.SIGINT:
|
case syscall.SIGINT:
|
||||||
opts.Logger.DebugContext(sCtx, "Received SIGINT signal to shutdown.")
|
cmd.logger.DebugContext(sCtx, "Received SIGINT signal to shutdown.")
|
||||||
case syscall.SIGTERM:
|
case syscall.SIGTERM:
|
||||||
opts.Logger.DebugContext(sCtx, "Sending SIGTERM signal to shutdown.")
|
cmd.logger.DebugContext(sCtx, "Sending SIGTERM signal to shutdown.")
|
||||||
}
|
}
|
||||||
cancel()
|
cancel()
|
||||||
}(ctx)
|
}(ctx)
|
||||||
|
|
||||||
ctx, shutdown, err := opts.Setup(ctx)
|
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream
|
||||||
|
loggerOut := cmd.outStream
|
||||||
|
if cmd.cfg.Stdio {
|
||||||
|
loggerOut = cmd.errStream
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle logger separately from config
|
||||||
|
switch strings.ToLower(cmd.cfg.LoggingFormat.String()) {
|
||||||
|
case "json":
|
||||||
|
logger, err := log.NewStructuredLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("unable to initialize logger: %w", err)
|
||||||
|
}
|
||||||
|
cmd.logger = logger
|
||||||
|
case "standard":
|
||||||
|
logger, err := log.NewStdLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to initialize logger: %w", err)
|
||||||
|
}
|
||||||
|
cmd.logger = logger
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("logging format invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx = util.WithLogger(ctx, cmd.logger)
|
||||||
|
|
||||||
|
// Set up OpenTelemetry
|
||||||
|
otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
}
|
}
|
||||||
defer func() {
|
defer func() {
|
||||||
_ = shutdown(ctx)
|
err := otelShutdown(ctx)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
isCustomConfigured, err := opts.LoadConfig(ctx)
|
var allToolsFiles []ToolsFile
|
||||||
|
|
||||||
|
// Load Prebuilt Configuration
|
||||||
|
if cmd.prebuiltConfig != "" {
|
||||||
|
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
|
||||||
|
cmd.logger.InfoContext(ctx, logMsg)
|
||||||
|
// Append prebuilt.source to Version string for the User Agent
|
||||||
|
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
|
||||||
|
|
||||||
|
parsed, err := parseToolsFile(ctx, buf)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
allToolsFiles = append(allToolsFiles, parsed)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine if Custom Files should be loaded
|
||||||
|
// Check for explicit custom flags
|
||||||
|
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
|
||||||
|
|
||||||
|
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
|
||||||
|
useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
|
||||||
|
|
||||||
|
if useDefaultToolsFile {
|
||||||
|
cmd.tools_file = "tools.yaml"
|
||||||
|
isCustomConfigured = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load Custom Configurations
|
||||||
|
if isCustomConfigured {
|
||||||
|
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
|
||||||
|
if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
|
||||||
|
(cmd.tools_file != "" && cmd.tools_folder != "") ||
|
||||||
|
(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
|
||||||
|
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
|
||||||
|
var customTools ToolsFile
|
||||||
|
var err error
|
||||||
|
|
||||||
|
if len(cmd.tools_files) > 0 {
|
||||||
|
// Use tools-files
|
||||||
|
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
|
||||||
|
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
|
||||||
|
} else if cmd.tools_folder != "" {
|
||||||
|
// Use tools-folder
|
||||||
|
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
|
||||||
|
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
|
||||||
|
} else {
|
||||||
|
// Use single file (tools-file or default `tools.yaml`)
|
||||||
|
buf, readFileErr := os.ReadFile(cmd.tools_file)
|
||||||
|
if readFileErr != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
customTools, err = parseToolsFile(ctx, buf)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
allToolsFiles = append(allToolsFiles, customTools)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge Everything
|
||||||
|
// This will error if custom tools collide with prebuilt tools
|
||||||
|
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
||||||
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cmd.cfg.SourceConfigs = finalToolsFile.Sources
|
||||||
|
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
|
||||||
|
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
|
||||||
|
cmd.cfg.ToolConfigs = finalToolsFile.Tools
|
||||||
|
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
|
||||||
|
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
|
||||||
|
|
||||||
|
authSourceConfigs := finalToolsFile.AuthSources
|
||||||
|
if authSourceConfigs != nil {
|
||||||
|
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
|
||||||
|
|
||||||
|
for k, v := range authSourceConfigs {
|
||||||
|
if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists {
|
||||||
|
errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
cmd.cfg.AuthServiceConfigs[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||||
|
|
||||||
// start server
|
// start server
|
||||||
s, err := server.NewServer(ctx, opts.Cfg)
|
s, err := server.NewServer(ctx, cmd.cfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
errMsg := fmt.Errorf("toolbox failed to initialize: %w", err)
|
errMsg := fmt.Errorf("toolbox failed to initialize: %w", err)
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
return errMsg
|
return errMsg
|
||||||
}
|
}
|
||||||
|
|
||||||
// run server in background
|
// run server in background
|
||||||
srvErr := make(chan error)
|
srvErr := make(chan error)
|
||||||
if opts.Cfg.Stdio {
|
if cmd.cfg.Stdio {
|
||||||
go func() {
|
go func() {
|
||||||
defer close(srvErr)
|
defer close(srvErr)
|
||||||
err = s.ServeStdio(ctx, opts.IOStreams.In, opts.IOStreams.Out)
|
err = s.ServeStdio(ctx, cmd.inStream, cmd.outStream)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
srvErr <- err
|
srvErr <- err
|
||||||
}
|
}
|
||||||
@@ -397,12 +1000,12 @@ func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error {
|
|||||||
err = s.Listen(ctx)
|
err = s.Listen(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
errMsg := fmt.Errorf("toolbox failed to start listener: %w", err)
|
errMsg := fmt.Errorf("toolbox failed to start listener: %w", err)
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
return errMsg
|
return errMsg
|
||||||
}
|
}
|
||||||
opts.Logger.InfoContext(ctx, "Server ready to serve!")
|
cmd.logger.InfoContext(ctx, "Server ready to serve!")
|
||||||
if opts.Cfg.UI {
|
if cmd.cfg.UI {
|
||||||
opts.Logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", opts.Cfg.Address, opts.Cfg.Port))
|
cmd.logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", cmd.cfg.Address, cmd.cfg.Port))
|
||||||
}
|
}
|
||||||
|
|
||||||
go func() {
|
go func() {
|
||||||
@@ -414,8 +1017,8 @@ func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error {
|
|||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
if isCustomConfigured && !opts.Cfg.DisableReload {
|
if isCustomConfigured && !cmd.cfg.DisableReload {
|
||||||
watchDirs, watchedFiles := resolveWatcherInputs(opts.ToolsFile, opts.ToolsFiles, opts.ToolsFolder)
|
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
|
||||||
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
||||||
go watchChanges(ctx, watchDirs, watchedFiles, s)
|
go watchChanges(ctx, watchDirs, watchedFiles, s)
|
||||||
}
|
}
|
||||||
@@ -425,13 +1028,13 @@ func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error {
|
|||||||
case err := <-srvErr:
|
case err := <-srvErr:
|
||||||
if err != nil {
|
if err != nil {
|
||||||
errMsg := fmt.Errorf("toolbox crashed with the following error: %w", err)
|
errMsg := fmt.Errorf("toolbox crashed with the following error: %w", err)
|
||||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
return errMsg
|
return errMsg
|
||||||
}
|
}
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
shutdownContext, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
shutdownContext, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
opts.Logger.WarnContext(shutdownContext, "Shutting down gracefully...")
|
cmd.logger.WarnContext(shutdownContext, "Shutting down gracefully...")
|
||||||
err := s.Shutdown(shutdownContext)
|
err := s.Shutdown(shutdownContext)
|
||||||
if err == context.DeadlineExceeded {
|
if err == context.DeadlineExceeded {
|
||||||
return fmt.Errorf("graceful shutdown timed out... forcing exit")
|
return fmt.Errorf("graceful shutdown timed out... forcing exit")
|
||||||
|
|||||||
1489
cmd/root_test.go
1489
cmd/root_test.go
File diff suppressed because it is too large
Load Diff
@@ -1 +1 @@
|
|||||||
0.26.0
|
0.25.0
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -27,13 +27,6 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```.
|
|
||||||
|
|
||||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -24,13 +24,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -20,13 +20,6 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```.
|
|
||||||
|
|
||||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt looker```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -12,17 +12,10 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
|
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest```.
|
|
||||||
|
|
||||||
3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
|
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
|
||||||
|
|
||||||
4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
|
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ most popular issues, so make sure to +1 ones you are the most interested in.
|
|||||||
## Can Toolbox be used for non-database tools?
|
## Can Toolbox be used for non-database tools?
|
||||||
|
|
||||||
**Yes!** While Toolbox is primarily focused on databases, it also supports generic
|
**Yes!** While Toolbox is primarily focused on databases, it also supports generic
|
||||||
**HTTP tools** (`type: http`). These allow you to connect your agents to REST APIs
|
**HTTP tools** (`kind: http`). These allow you to connect your agents to REST APIs
|
||||||
and other web services, enabling workflows that extend beyond database interactions.
|
and other web services, enabling workflows that extend beyond database interactions.
|
||||||
|
|
||||||
For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md).
|
For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md).
|
||||||
|
|||||||
@@ -64,7 +64,7 @@ The structured logging outputs log as JSON:
|
|||||||
"timestamp":"2024-11-04T16:45:11.987299-08:00",
|
"timestamp":"2024-11-04T16:45:11.987299-08:00",
|
||||||
"severity":"ERROR",
|
"severity":"ERROR",
|
||||||
"logging.googleapis.com/sourceLocation":{...},
|
"logging.googleapis.com/sourceLocation":{...},
|
||||||
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid type of data source"
|
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid kind of data source"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -234,7 +234,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.26.0\" # x-release-please-version\n",
|
"version = \"0.25.0\" # x-release-please-version\n",
|
||||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
@@ -300,18 +300,17 @@
|
|||||||
"# You can also upload a tools file and use that to run toolbox.\n",
|
"# You can also upload a tools file and use that to run toolbox.\n",
|
||||||
"tools_file_name = \"tools.yml\"\n",
|
"tools_file_name = \"tools.yml\"\n",
|
||||||
"file_content = f\"\"\"\n",
|
"file_content = f\"\"\"\n",
|
||||||
"kind: sources\n",
|
"sources:\n",
|
||||||
"name: my-pg-source\n",
|
" my-pg-source:\n",
|
||||||
"type: postgres\n",
|
" kind: postgres\n",
|
||||||
" host: 127.0.0.1\n",
|
" host: 127.0.0.1\n",
|
||||||
" port: 5432\n",
|
" port: 5432\n",
|
||||||
" database: toolbox_db\n",
|
" database: toolbox_db\n",
|
||||||
" user: toolbox_user\n",
|
" user: toolbox_user\n",
|
||||||
" password: my-password\n",
|
" password: my-password\n",
|
||||||
"---\n",
|
"tools:\n",
|
||||||
"kind: tools\n",
|
" search-hotels-by-name:\n",
|
||||||
"name: search-hotels-by-name\n",
|
" kind: postgres-sql\n",
|
||||||
"type: postgres-sql\n",
|
|
||||||
" source: my-pg-source\n",
|
" source: my-pg-source\n",
|
||||||
" description: Search for hotels based on name.\n",
|
" description: Search for hotels based on name.\n",
|
||||||
" parameters:\n",
|
" parameters:\n",
|
||||||
@@ -319,10 +318,8 @@
|
|||||||
" type: string\n",
|
" type: string\n",
|
||||||
" description: The name of the hotel.\n",
|
" description: The name of the hotel.\n",
|
||||||
" statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n",
|
" statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n",
|
||||||
"---\n",
|
" search-hotels-by-location:\n",
|
||||||
"kind: tools\n",
|
" kind: postgres-sql\n",
|
||||||
"name: search-hotels-by-location\n",
|
|
||||||
"type: postgres-sql\n",
|
|
||||||
" source: my-pg-source\n",
|
" source: my-pg-source\n",
|
||||||
" description: Search for hotels based on location.\n",
|
" description: Search for hotels based on location.\n",
|
||||||
" parameters:\n",
|
" parameters:\n",
|
||||||
@@ -330,10 +327,8 @@
|
|||||||
" type: string\n",
|
" type: string\n",
|
||||||
" description: The location of the hotel.\n",
|
" description: The location of the hotel.\n",
|
||||||
" statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n",
|
" statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n",
|
||||||
"---\n",
|
" book-hotel:\n",
|
||||||
"kind: tools\n",
|
" kind: postgres-sql\n",
|
||||||
"name: book-hotel\n",
|
|
||||||
"type: postgres-sql\n",
|
|
||||||
" source: my-pg-source\n",
|
" source: my-pg-source\n",
|
||||||
" description: >-\n",
|
" description: >-\n",
|
||||||
" Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n",
|
" Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n",
|
||||||
@@ -342,10 +337,8 @@
|
|||||||
" type: string\n",
|
" type: string\n",
|
||||||
" description: The ID of the hotel to book.\n",
|
" description: The ID of the hotel to book.\n",
|
||||||
" statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n",
|
" statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n",
|
||||||
"---\n",
|
" update-hotel:\n",
|
||||||
"kind: tools\n",
|
" kind: postgres-sql\n",
|
||||||
"name: update-hotel\n",
|
|
||||||
"type: postgres-sql\n",
|
|
||||||
" source: my-pg-source\n",
|
" source: my-pg-source\n",
|
||||||
" description: >-\n",
|
" description: >-\n",
|
||||||
" Update a hotel's check-in and check-out dates by its ID. Returns a message\n",
|
" Update a hotel's check-in and check-out dates by its ID. Returns a message\n",
|
||||||
@@ -363,10 +356,8 @@
|
|||||||
" statement: >-\n",
|
" statement: >-\n",
|
||||||
" UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n",
|
" UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n",
|
||||||
" as date) WHERE id = \\$1;\n",
|
" as date) WHERE id = \\$1;\n",
|
||||||
"---\n",
|
" cancel-hotel:\n",
|
||||||
"kind: tools\n",
|
" kind: postgres-sql\n",
|
||||||
"name: cancel-hotel\n",
|
|
||||||
"type: postgres-sql\n",
|
|
||||||
" source: my-pg-source\n",
|
" source: my-pg-source\n",
|
||||||
" description: Cancel a hotel by its ID.\n",
|
" description: Cancel a hotel by its ID.\n",
|
||||||
" parameters:\n",
|
" parameters:\n",
|
||||||
@@ -374,10 +365,8 @@
|
|||||||
" type: string\n",
|
" type: string\n",
|
||||||
" description: The ID of the hotel to cancel.\n",
|
" description: The ID of the hotel to cancel.\n",
|
||||||
" statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n",
|
" statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n",
|
||||||
"---\n",
|
"toolsets:\n",
|
||||||
"kind: toolsets\n",
|
" my-toolset:\n",
|
||||||
"name: my-toolset\n",
|
|
||||||
"tools:\n",
|
|
||||||
" - search-hotels-by-name\n",
|
" - search-hotels-by-name\n",
|
||||||
" - search-hotels-by-location\n",
|
" - search-hotels-by-location\n",
|
||||||
" - book-hotel\n",
|
" - book-hotel\n",
|
||||||
@@ -520,7 +509,8 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"! pip install google-adk[toolbox] --quiet"
|
"! pip install toolbox-core --quiet\n",
|
||||||
|
"! pip install google-adk --quiet"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -535,18 +525,14 @@
|
|||||||
"from google.adk.runners import Runner\n",
|
"from google.adk.runners import Runner\n",
|
||||||
"from google.adk.sessions import InMemorySessionService\n",
|
"from google.adk.sessions import InMemorySessionService\n",
|
||||||
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
||||||
"from google.adk.tools.toolbox_toolset import ToolboxToolset\n",
|
|
||||||
"from google.genai import types\n",
|
"from google.genai import types\n",
|
||||||
|
"from toolbox_core import ToolboxSyncClient\n",
|
||||||
"\n",
|
"\n",
|
||||||
"import os\n",
|
"import os\n",
|
||||||
"# TODO(developer): replace this with your Google API key\n",
|
"# TODO(developer): replace this with your Google API key\n",
|
||||||
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Configure toolset\n",
|
"toolbox_client = ToolboxSyncClient(\"http://127.0.0.1:5000\")\n",
|
||||||
"toolset = ToolboxToolset(\n",
|
|
||||||
" server_url=\"http://127.0.0.1:5000\",\n",
|
|
||||||
" toolset_name=\"my-toolset\"\n",
|
|
||||||
")\n",
|
|
||||||
"\n",
|
"\n",
|
||||||
"prompt = \"\"\"\n",
|
"prompt = \"\"\"\n",
|
||||||
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
||||||
@@ -563,7 +549,7 @@
|
|||||||
" name='hotel_agent',\n",
|
" name='hotel_agent',\n",
|
||||||
" description='A helpful AI assistant.',\n",
|
" description='A helpful AI assistant.',\n",
|
||||||
" instruction=prompt,\n",
|
" instruction=prompt,\n",
|
||||||
" tools=[toolset],\n",
|
" tools=toolbox_client.load_toolset(\"my-toolset\"),\n",
|
||||||
")\n",
|
")\n",
|
||||||
"\n",
|
"\n",
|
||||||
"session_service = InMemorySessionService()\n",
|
"session_service = InMemorySessionService()\n",
|
||||||
|
|||||||
@@ -36,9 +36,9 @@ Toolbox should have access to. Most tools will have at least one source to
|
|||||||
execute against.
|
execute against.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-pg-source
|
my-pg-source:
|
||||||
type: postgres
|
kind: postgres
|
||||||
host: 127.0.0.1
|
host: 127.0.0.1
|
||||||
port: 5432
|
port: 5432
|
||||||
database: toolbox_db
|
database: toolbox_db
|
||||||
@@ -52,13 +52,13 @@ For more details on configuring different types of sources, see the
|
|||||||
### Tools
|
### Tools
|
||||||
|
|
||||||
The `tools` section of your `tools.yaml` defines the actions your agent can
|
The `tools` section of your `tools.yaml` defines the actions your agent can
|
||||||
take: what type of tool it is, which source(s) it affects, what parameters it
|
take: what kind of tool it is, which source(s) it affects, what parameters it
|
||||||
uses, etc.
|
uses, etc.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: tools
|
tools:
|
||||||
name: search-hotels-by-name
|
search-hotels-by-name:
|
||||||
type: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on name.
|
description: Search for hotels based on name.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -78,15 +78,11 @@ that you want to be able to load together. This can be useful for defining
|
|||||||
different sets for different agents or different applications.
|
different sets for different agents or different applications.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: toolsets
|
toolsets:
|
||||||
name: my_first_toolset
|
my_first_toolset:
|
||||||
tools:
|
|
||||||
- my_first_tool
|
- my_first_tool
|
||||||
- my_second_tool
|
- my_second_tool
|
||||||
---
|
my_second_toolset:
|
||||||
kind: toolsets
|
|
||||||
name: my_second_toolset
|
|
||||||
tools:
|
|
||||||
- my_second_tool
|
- my_second_tool
|
||||||
- my_third_tool
|
- my_third_tool
|
||||||
```
|
```
|
||||||
@@ -107,8 +103,8 @@ The `prompts` section of your `tools.yaml` defines the templates containing
|
|||||||
structured messages and instructions for interacting with language models.
|
structured messages and instructions for interacting with language models.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: prompts
|
prompts:
|
||||||
name: code_review
|
code_review:
|
||||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||||
messages:
|
messages:
|
||||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||||
|
|||||||
@@ -16,12 +16,6 @@ Databases” as its initial development predated MCP, but was renamed to align
|
|||||||
with recently added MCP compatibility.
|
with recently added MCP compatibility.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
{{< notice note >}}
|
|
||||||
This document has been updated to support the configuration file v2 format. To
|
|
||||||
view documentation with configuration file v1 format, please navigate to the
|
|
||||||
top-right menu and select versions v0.26.0 or older.
|
|
||||||
{{< /notice >}}
|
|
||||||
|
|
||||||
## Why Toolbox?
|
## Why Toolbox?
|
||||||
|
|
||||||
Toolbox helps you build Gen AI tools that let your agents access data in your
|
Toolbox helps you build Gen AI tools that let your agents access data in your
|
||||||
@@ -77,7 +71,7 @@ redeploying your application.
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### Quickstart: Running Toolbox using NPX
|
### (Non-production) Running Toolbox
|
||||||
|
|
||||||
You can run Toolbox directly with a [configuration file](../configure.md):
|
You can run Toolbox directly with a [configuration file](../configure.md):
|
||||||
|
|
||||||
@@ -109,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -120,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -131,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -142,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
|
|||||||
|
|
||||||
```cmd
|
```cmd
|
||||||
:: see releases page for other versions
|
:: see releases page for other versions
|
||||||
set VERSION=0.26.0
|
set VERSION=0.25.0
|
||||||
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -152,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):
|
|||||||
|
|
||||||
```powershell
|
```powershell
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
$VERSION = "0.26.0"
|
$VERSION = "0.25.0"
|
||||||
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -164,7 +158,7 @@ You can also install Toolbox as a container:
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -183,7 +177,7 @@ To install from source, ensure you have the latest version of
|
|||||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install github.com/googleapis/genai-toolbox@v0.26.0
|
go install github.com/googleapis/genai-toolbox@v0.25.0
|
||||||
```
|
```
|
||||||
|
|
||||||
{{% /tab %}}
|
{{% /tab %}}
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ runtime](https://research.google.com/colaboratory/local-runtimes.html).
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install google-adk[toolbox]
|
pip install toolbox-core
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
@@ -73,7 +73,7 @@ pip install toolbox-core
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
# No other dependencies required for ADK
|
pip install google-adk
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
@@ -115,7 +115,7 @@ pip install google-genai
|
|||||||
|
|
||||||
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
|
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
|
||||||
```py
|
```py
|
||||||
{{< regionInclude "quickstart/python/adk/quickstart.py" "quickstart" >}}
|
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||||
```
|
```
|
||||||
<br/>
|
<br/>
|
||||||
|
|
||||||
|
|||||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
@@ -125,18 +125,17 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-pg-source
|
my-pg-source:
|
||||||
type: postgres
|
kind: postgres
|
||||||
host: 127.0.0.1
|
host: 127.0.0.1
|
||||||
port: 5432
|
port: 5432
|
||||||
database: toolbox_db
|
database: toolbox_db
|
||||||
user: toolbox_user
|
user: toolbox_user
|
||||||
password: my-password
|
password: my-password
|
||||||
---
|
tools:
|
||||||
kind: tools
|
search-hotels-by-name:
|
||||||
name: search-hotels-by-name
|
kind: postgres-sql
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on name.
|
description: Search for hotels based on name.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -144,10 +143,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The name of the hotel.
|
description: The name of the hotel.
|
||||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||||
---
|
search-hotels-by-location:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: search-hotels-by-location
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on location.
|
description: Search for hotels based on location.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -155,10 +152,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The location of the hotel.
|
description: The location of the hotel.
|
||||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||||
---
|
book-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: book-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: >-
|
description: >-
|
||||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||||
@@ -167,10 +162,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The ID of the hotel to book.
|
description: The ID of the hotel to book.
|
||||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||||
---
|
update-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: update-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: >-
|
description: >-
|
||||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||||
@@ -188,10 +181,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
statement: >-
|
statement: >-
|
||||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||||
as date) WHERE id = $1;
|
as date) WHERE id = $1;
|
||||||
---
|
cancel-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: cancel-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Cancel a hotel by its ID.
|
description: Cancel a hotel by its ID.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -199,10 +190,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The ID of the hotel to cancel.
|
description: The ID of the hotel to cancel.
|
||||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||||
---
|
toolsets:
|
||||||
kind: toolsets
|
my-toolset:
|
||||||
name: my-toolset
|
|
||||||
tools:
|
|
||||||
- search-hotels-by-name
|
- search-hotels-by-name
|
||||||
- search-hotels-by-location
|
- search-hotels-by-location
|
||||||
- book-hotel
|
- book-hotel
|
||||||
|
|||||||
@@ -157,18 +157,17 @@ Create a file named `tools.yaml`. This file defines the database connection, the
|
|||||||
SQL tools available, and the prompts the agents will use.
|
SQL tools available, and the prompts the agents will use.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-foodiefind-db
|
my-foodiefind-db:
|
||||||
type: postgres
|
kind: postgres
|
||||||
host: 127.0.0.1
|
host: 127.0.0.1
|
||||||
port: 5432
|
port: 5432
|
||||||
database: toolbox_db
|
database: toolbox_db
|
||||||
user: toolbox_user
|
user: toolbox_user
|
||||||
password: my-password
|
password: my-password
|
||||||
---
|
tools:
|
||||||
kind: tools
|
find_user_by_email:
|
||||||
name: find_user_by_email
|
kind: postgres-sql
|
||||||
type: postgres-sql
|
|
||||||
source: my-foodiefind-db
|
source: my-foodiefind-db
|
||||||
description: Find a user's ID by their email address.
|
description: Find a user's ID by their email address.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -176,10 +175,8 @@ parameters:
|
|||||||
type: string
|
type: string
|
||||||
description: The email address of the user to find.
|
description: The email address of the user to find.
|
||||||
statement: SELECT id FROM users WHERE email = $1;
|
statement: SELECT id FROM users WHERE email = $1;
|
||||||
---
|
find_restaurant_by_name:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: find_restaurant_by_name
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-foodiefind-db
|
source: my-foodiefind-db
|
||||||
description: Find a restaurant's ID by its exact name.
|
description: Find a restaurant's ID by its exact name.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -187,10 +184,8 @@ parameters:
|
|||||||
type: string
|
type: string
|
||||||
description: The name of the restaurant to find.
|
description: The name of the restaurant to find.
|
||||||
statement: SELECT id FROM restaurants WHERE name = $1;
|
statement: SELECT id FROM restaurants WHERE name = $1;
|
||||||
---
|
find_review_by_user_and_restaurant:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: find_review_by_user_and_restaurant
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-foodiefind-db
|
source: my-foodiefind-db
|
||||||
description: Find the full record for a specific review using the user's ID and the restaurant's ID.
|
description: Find the full record for a specific review using the user's ID and the restaurant's ID.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -201,9 +196,8 @@ parameters:
|
|||||||
type: integer
|
type: integer
|
||||||
description: The numerical ID of the restaurant.
|
description: The numerical ID of the restaurant.
|
||||||
statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
|
statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
|
||||||
---
|
prompts:
|
||||||
kind: prompts
|
investigate_missing_review:
|
||||||
name: investigate_missing_review
|
|
||||||
description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
|
description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
|
||||||
arguments:
|
arguments:
|
||||||
- name: "user_email"
|
- name: "user_email"
|
||||||
|
|||||||
@@ -18,7 +18,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
||||||
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
|
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"arrify": "^2.0.0",
|
"arrify": "^2.0.0",
|
||||||
"extend": "^3.0.2"
|
"extend": "^3.0.2"
|
||||||
@@ -32,7 +31,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
|
||||||
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
|
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=14.0.0"
|
"node": ">=14.0.0"
|
||||||
}
|
}
|
||||||
@@ -42,17 +40,15 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
|
||||||
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
|
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=14"
|
"node": ">=14"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@google-cloud/storage": {
|
"node_modules/@google-cloud/storage": {
|
||||||
"version": "7.19.0",
|
"version": "7.18.0",
|
||||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.19.0.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
|
||||||
"integrity": "sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ==",
|
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@google-cloud/paginator": "^5.0.0",
|
"@google-cloud/paginator": "^5.0.0",
|
||||||
"@google-cloud/projectify": "^4.0.0",
|
"@google-cloud/projectify": "^4.0.0",
|
||||||
@@ -60,7 +56,7 @@
|
|||||||
"abort-controller": "^3.0.0",
|
"abort-controller": "^3.0.0",
|
||||||
"async-retry": "^1.3.3",
|
"async-retry": "^1.3.3",
|
||||||
"duplexify": "^4.1.3",
|
"duplexify": "^4.1.3",
|
||||||
"fast-xml-parser": "^5.3.4",
|
"fast-xml-parser": "^4.4.1",
|
||||||
"gaxios": "^6.0.2",
|
"gaxios": "^6.0.2",
|
||||||
"google-auth-library": "^9.6.3",
|
"google-auth-library": "^9.6.3",
|
||||||
"html-entities": "^2.5.2",
|
"html-entities": "^2.5.2",
|
||||||
@@ -79,7 +75,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"uuid": "dist/bin/uuid"
|
"uuid": "dist/bin/uuid"
|
||||||
}
|
}
|
||||||
@@ -102,6 +97,7 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
|
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
|
||||||
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
|
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"google-auth-library": "^9.14.2",
|
"google-auth-library": "^9.14.2",
|
||||||
"ws": "^8.18.0"
|
"ws": "^8.18.0"
|
||||||
@@ -140,6 +136,7 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
|
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
|
||||||
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
|
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ajv": "^6.12.6",
|
"ajv": "^6.12.6",
|
||||||
"content-type": "^1.0.5",
|
"content-type": "^1.0.5",
|
||||||
@@ -302,7 +299,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
|
||||||
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
|
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@@ -311,15 +307,13 @@
|
|||||||
"version": "0.12.5",
|
"version": "0.12.5",
|
||||||
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
|
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
|
||||||
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
|
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/@types/node": {
|
"node_modules/@types/node": {
|
||||||
"version": "24.10.1",
|
"version": "24.10.1",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
|
||||||
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
|
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"undici-types": "~7.16.0"
|
"undici-types": "~7.16.0"
|
||||||
}
|
}
|
||||||
@@ -329,7 +323,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
|
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
|
||||||
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
|
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/caseless": "*",
|
"@types/caseless": "*",
|
||||||
"@types/node": "*",
|
"@types/node": "*",
|
||||||
@@ -342,7 +335,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -360,7 +352,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -370,7 +361,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"mime-db": "1.52.0"
|
"mime-db": "1.52.0"
|
||||||
},
|
},
|
||||||
@@ -382,15 +372,13 @@
|
|||||||
"version": "4.0.5",
|
"version": "4.0.5",
|
||||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
||||||
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
|
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/abort-controller": {
|
"node_modules/abort-controller": {
|
||||||
"version": "3.0.0",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"event-target-shim": "^5.0.0"
|
"event-target-shim": "^5.0.0"
|
||||||
},
|
},
|
||||||
@@ -465,7 +453,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
|
||||||
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
|
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
@@ -475,7 +462,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
|
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
|
||||||
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
|
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"retry": "0.13.1"
|
"retry": "0.13.1"
|
||||||
}
|
}
|
||||||
@@ -768,7 +754,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
|
||||||
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
|
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"end-of-stream": "^1.4.1",
|
"end-of-stream": "^1.4.1",
|
||||||
"inherits": "^2.0.3",
|
"inherits": "^2.0.3",
|
||||||
@@ -817,7 +802,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
|
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
|
||||||
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
|
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"once": "^1.4.0"
|
"once": "^1.4.0"
|
||||||
}
|
}
|
||||||
@@ -887,7 +871,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
}
|
}
|
||||||
@@ -918,6 +901,7 @@
|
|||||||
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
|
||||||
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
|
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"accepts": "^2.0.0",
|
"accepts": "^2.0.0",
|
||||||
"body-parser": "^2.2.0",
|
"body-parser": "^2.2.0",
|
||||||
@@ -989,9 +973,9 @@
|
|||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/fast-xml-parser": {
|
"node_modules/fast-xml-parser": {
|
||||||
"version": "5.3.5",
|
"version": "4.5.3",
|
||||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
|
||||||
"integrity": "sha512-JeaA2Vm9ffQKp9VjvfzObuMCjUYAp5WDYhRYL5LrBPY/jUDlUtOvDfot0vKSkB9tuX885BDHjtw4fZadD95wnA==",
|
"integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
|
||||||
"funding": [
|
"funding": [
|
||||||
{
|
{
|
||||||
"type": "github",
|
"type": "github",
|
||||||
@@ -999,9 +983,8 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"strnum": "^2.1.2"
|
"strnum": "^1.1.1"
|
||||||
},
|
},
|
||||||
"bin": {
|
"bin": {
|
||||||
"fxparser": "src/cli/cli.js"
|
"fxparser": "src/cli/cli.js"
|
||||||
@@ -1350,8 +1333,7 @@
|
|||||||
"url": "https://patreon.com/mdevils"
|
"url": "https://patreon.com/mdevils"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/http-errors": {
|
"node_modules/http-errors": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
@@ -1383,7 +1365,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
|
||||||
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
|
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@tootallnate/once": "2",
|
"@tootallnate/once": "2",
|
||||||
"agent-base": "6",
|
"agent-base": "6",
|
||||||
@@ -1398,7 +1379,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"debug": "4"
|
"debug": "4"
|
||||||
},
|
},
|
||||||
@@ -1575,7 +1555,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
|
||||||
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
|
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"mime": "cli.js"
|
"mime": "cli.js"
|
||||||
},
|
},
|
||||||
@@ -1736,7 +1715,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
|
||||||
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"yocto-queue": "^0.1.0"
|
"yocto-queue": "^0.1.0"
|
||||||
},
|
},
|
||||||
@@ -1878,7 +1856,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"inherits": "^2.0.3",
|
"inherits": "^2.0.3",
|
||||||
"string_decoder": "^1.1.1",
|
"string_decoder": "^1.1.1",
|
||||||
@@ -1893,7 +1870,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
|
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
|
||||||
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
|
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 4"
|
"node": ">= 4"
|
||||||
}
|
}
|
||||||
@@ -1903,7 +1879,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
|
||||||
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
|
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/request": "^2.48.8",
|
"@types/request": "^2.48.8",
|
||||||
"extend": "^3.0.2",
|
"extend": "^3.0.2",
|
||||||
@@ -2132,7 +2107,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
|
||||||
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
|
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"stubs": "^3.0.0"
|
"stubs": "^3.0.0"
|
||||||
}
|
}
|
||||||
@@ -2141,15 +2115,13 @@
|
|||||||
"version": "1.0.3",
|
"version": "1.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
|
||||||
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
|
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/string_decoder": {
|
"node_modules/string_decoder": {
|
||||||
"version": "1.3.0",
|
"version": "1.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"safe-buffer": "~5.2.0"
|
"safe-buffer": "~5.2.0"
|
||||||
}
|
}
|
||||||
@@ -2251,31 +2223,28 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/strnum": {
|
"node_modules/strnum": {
|
||||||
"version": "2.1.2",
|
"version": "1.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
|
||||||
"integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==",
|
"integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
|
||||||
"funding": [
|
"funding": [
|
||||||
{
|
{
|
||||||
"type": "github",
|
"type": "github",
|
||||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/stubs": {
|
"node_modules/stubs": {
|
||||||
"version": "3.0.0",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
|
||||||
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
|
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/teeny-request": {
|
"node_modules/teeny-request": {
|
||||||
"version": "9.0.0",
|
"version": "9.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
|
||||||
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
|
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"http-proxy-agent": "^5.0.0",
|
"http-proxy-agent": "^5.0.0",
|
||||||
"https-proxy-agent": "^5.0.0",
|
"https-proxy-agent": "^5.0.0",
|
||||||
@@ -2292,7 +2261,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"debug": "4"
|
"debug": "4"
|
||||||
},
|
},
|
||||||
@@ -2305,7 +2273,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"agent-base": "6",
|
"agent-base": "6",
|
||||||
"debug": "4"
|
"debug": "4"
|
||||||
@@ -2347,8 +2314,7 @@
|
|||||||
"version": "7.16.0",
|
"version": "7.16.0",
|
||||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
||||||
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/unpipe": {
|
"node_modules/unpipe": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
@@ -2372,8 +2338,7 @@
|
|||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||||
"license": "MIT",
|
"license": "MIT"
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/uuid": {
|
"node_modules/uuid": {
|
||||||
"version": "9.0.1",
|
"version": "9.0.1",
|
||||||
@@ -2560,7 +2525,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||||
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
},
|
},
|
||||||
@@ -2573,6 +2537,7 @@
|
|||||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"peer": true,
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/colinhacks"
|
"url": "https://github.com/sponsors/colinhacks"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,13 +24,12 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@dabh/diagnostics": {
|
"node_modules/@dabh/diagnostics": {
|
||||||
"version": "2.0.8",
|
"version": "2.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz",
|
||||||
"integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==",
|
"integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@so-ric/colorspace": "^1.1.6",
|
"colorspace": "1.1.x",
|
||||||
"enabled": "2.0.x",
|
"enabled": "2.0.x",
|
||||||
"kuler": "^2.0.0"
|
"kuler": "^2.0.0"
|
||||||
}
|
}
|
||||||
@@ -579,10 +578,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@google-cloud/firestore": {
|
"node_modules/@google-cloud/firestore": {
|
||||||
"version": "7.11.6",
|
"version": "7.11.3",
|
||||||
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.3.tgz",
|
||||||
"integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==",
|
"integrity": "sha512-qsM3/WHpawF07SRVvEJJVRwhYzM7o9qtuksyuqnrMig6fxIrwWnsezECWsG/D5TyYru51Fv5c/RTqNDQ2yU+4w==",
|
||||||
"license": "Apache-2.0",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -2889,17 +2887,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
||||||
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
||||||
},
|
},
|
||||||
"node_modules/@so-ric/colorspace": {
|
|
||||||
"version": "1.1.6",
|
|
||||||
"resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz",
|
|
||||||
"integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
|
||||||
"dependencies": {
|
|
||||||
"color": "^5.0.2",
|
|
||||||
"text-hex": "1.0.x"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@toolbox-sdk/core": {
|
"node_modules/@toolbox-sdk/core": {
|
||||||
"version": "0.1.2",
|
"version": "0.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
||||||
@@ -3351,13 +3338,13 @@
|
|||||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||||
},
|
},
|
||||||
"node_modules/axios": {
|
"node_modules/axios": {
|
||||||
"version": "1.13.5",
|
"version": "1.12.2",
|
||||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||||
"integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
|
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"follow-redirects": "^1.15.11",
|
"follow-redirects": "^1.15.6",
|
||||||
"form-data": "^4.0.5",
|
"form-data": "^4.0.4",
|
||||||
"proxy-from-env": "^1.1.0"
|
"proxy-from-env": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -3528,53 +3515,38 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color": {
|
"node_modules/color": {
|
||||||
"version": "5.0.3",
|
"version": "3.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
|
||||||
"integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==",
|
"integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-convert": "^3.1.3",
|
"color-convert": "^1.9.3",
|
||||||
"color-string": "^2.1.3"
|
"color-string": "^1.6.0"
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-convert": {
|
"node_modules/color-convert": {
|
||||||
"version": "3.1.3",
|
"version": "1.9.3",
|
||||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||||
"integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==",
|
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "^2.0.0"
|
"color-name": "1.1.3"
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=14.6"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-name": {
|
"node_modules/color-name": {
|
||||||
"version": "2.1.0",
|
"version": "1.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||||
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
|
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
|
||||||
"license": "MIT",
|
"optional": true
|
||||||
"optional": true,
|
|
||||||
"engines": {
|
|
||||||
"node": ">=12.20"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"node_modules/color-string": {
|
"node_modules/color-string": {
|
||||||
"version": "2.1.4",
|
"version": "1.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz",
|
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
|
||||||
"integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==",
|
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "^2.0.0"
|
"color-name": "^1.0.0",
|
||||||
},
|
"simple-swizzle": "^0.2.2"
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/colorette": {
|
"node_modules/colorette": {
|
||||||
@@ -3582,6 +3554,16 @@
|
|||||||
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
||||||
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
||||||
},
|
},
|
||||||
|
"node_modules/colorspace": {
|
||||||
|
"version": "1.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz",
|
||||||
|
"integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"color": "^3.1.3",
|
||||||
|
"text-hex": "1.0.x"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/combined-stream": {
|
"node_modules/combined-stream": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
@@ -4248,10 +4230,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/form-data": {
|
"node_modules/form-data": {
|
||||||
"version": "4.0.5",
|
"version": "4.0.4",
|
||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||||
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
|
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -4987,6 +4968,12 @@
|
|||||||
"node": ">= 0.10"
|
"node": ">= 0.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/is-arrayish": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
"node_modules/is-core-module": {
|
"node_modules/is-core-module": {
|
||||||
"version": "2.16.1",
|
"version": "2.16.1",
|
||||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
||||||
@@ -5127,14 +5114,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jsonwebtoken/node_modules/jws": {
|
"node_modules/jsonwebtoken/node_modules/jws": {
|
||||||
"version": "3.2.3",
|
"version": "3.2.2",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz",
|
||||||
"integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==",
|
"integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^1.4.2",
|
"jwa": "^1.4.1",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5167,12 +5153,11 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jws": {
|
"node_modules/jws": {
|
||||||
"version": "4.0.1",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||||
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^2.0.1",
|
"jwa": "^2.0.0",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5439,10 +5424,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/node-forge": {
|
"node_modules/node-forge": {
|
||||||
"version": "1.3.3",
|
"version": "1.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz",
|
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
|
||||||
"integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==",
|
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
|
||||||
"license": "(BSD-3-Clause OR GPL-2.0)",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
@@ -6054,6 +6038,15 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/simple-swizzle": {
|
||||||
|
"version": "0.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
|
||||||
|
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"is-arrayish": "^0.3.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/source-map": {
|
"node_modules/source-map": {
|
||||||
"version": "0.6.1",
|
"version": "0.6.1",
|
||||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||||
@@ -6240,7 +6233,6 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
||||||
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true
|
"optional": true
|
||||||
},
|
},
|
||||||
"node_modules/thriftrw": {
|
"node_modules/thriftrw": {
|
||||||
@@ -6424,14 +6416,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/winston": {
|
"node_modules/winston": {
|
||||||
"version": "3.19.0",
|
"version": "3.17.0",
|
||||||
"resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz",
|
"resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz",
|
||||||
"integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==",
|
"integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@colors/colors": "^1.6.0",
|
"@colors/colors": "^1.6.0",
|
||||||
"@dabh/diagnostics": "^2.0.8",
|
"@dabh/diagnostics": "^2.0.2",
|
||||||
"async": "^3.2.3",
|
"async": "^3.2.3",
|
||||||
"is-stream": "^2.0.0",
|
"is-stream": "^2.0.0",
|
||||||
"logform": "^2.7.0",
|
"logform": "^2.7.0",
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ export async function main() {
|
|||||||
|
|
||||||
for (const query of queries) {
|
for (const query of queries) {
|
||||||
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
||||||
let response = await ai.generate({
|
const response = await ai.generate({
|
||||||
messages: conversationHistory,
|
messages: conversationHistory,
|
||||||
tools: tools,
|
tools: tools,
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -18,8 +18,7 @@
|
|||||||
"node_modules/@cfworker/json-schema": {
|
"node_modules/@cfworker/json-schema": {
|
||||||
"version": "4.1.1",
|
"version": "4.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
|
||||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
|
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
|
||||||
"peer": true
|
|
||||||
},
|
},
|
||||||
"node_modules/@google/generative-ai": {
|
"node_modules/@google/generative-ai": {
|
||||||
"version": "0.24.1",
|
"version": "0.24.1",
|
||||||
@@ -226,7 +225,6 @@
|
|||||||
"version": "5.2.0",
|
"version": "5.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
|
||||||
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
|
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
},
|
},
|
||||||
@@ -310,7 +308,6 @@
|
|||||||
"version": "6.3.0",
|
"version": "6.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
||||||
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
},
|
},
|
||||||
@@ -423,7 +420,6 @@
|
|||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
||||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
||||||
"peer": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
@@ -825,7 +821,6 @@
|
|||||||
"version": "1.0.21",
|
"version": "1.0.21",
|
||||||
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
|
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
|
||||||
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
|
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"base64-js": "^1.5.1"
|
"base64-js": "^1.5.1"
|
||||||
}
|
}
|
||||||
@@ -878,9 +873,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/langsmith": {
|
"node_modules/langsmith": {
|
||||||
"version": "0.5.2",
|
"version": "0.4.3",
|
||||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.5.2.tgz",
|
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
|
||||||
"integrity": "sha512-CfkcQsiajtTWknAcyItvJsKEQdY2VgDpm6U8pRI9wnM07mevnOv5EF+RcqWGwx37SEUxtyi2RXMwnKW8b06JtA==",
|
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/uuid": "^10.0.0",
|
"@types/uuid": "^10.0.0",
|
||||||
@@ -974,7 +969,6 @@
|
|||||||
"version": "4.2.0",
|
"version": "4.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
||||||
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
|
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"mustache": "bin/mustache"
|
"mustache": "bin/mustache"
|
||||||
}
|
}
|
||||||
@@ -1413,6 +1407,7 @@
|
|||||||
"version": "3.25.76",
|
"version": "3.25.76",
|
||||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||||
|
"peer": true,
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/colinhacks"
|
"url": "https://github.com/sponsors/colinhacks"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -975,10 +975,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/lodash": {
|
"node_modules/lodash": {
|
||||||
"version": "4.17.23",
|
"version": "4.17.21",
|
||||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
|
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||||
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
|
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
||||||
"license": "MIT"
|
|
||||||
},
|
},
|
||||||
"node_modules/magic-bytes.js": {
|
"node_modules/magic-bytes.js": {
|
||||||
"version": "1.12.1",
|
"version": "1.12.1",
|
||||||
|
|||||||
@@ -1,57 +1,15 @@
|
|||||||
# [START quickstart]
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from google.adk.runners import InMemoryRunner
|
from toolbox_core import ToolboxSyncClient
|
||||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
|
||||||
from google.genai.types import Content, Part
|
|
||||||
|
|
||||||
prompt = """
|
|
||||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
|
||||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
|
||||||
location and price tier. Always mention hotel ids while performing any
|
|
||||||
searches. This is very important for any operations. For any bookings or
|
|
||||||
cancellations, please provide the appropriate confirmation. Be sure to
|
|
||||||
update checkin or checkout dates if mentioned by the user.
|
|
||||||
Don't ask for confirmations from the user.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
||||||
toolset = ToolboxToolset(
|
client = ToolboxSyncClient("http://127.0.0.1:5000")
|
||||||
server_url="http://127.0.0.1:5000",
|
|
||||||
)
|
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='hotel_assistant',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction=prompt,
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=[toolset],
|
tools=client.load_toolset(),
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
# [END quickstart]
|
|
||||||
|
|
||||||
queries = [
|
|
||||||
"Find hotels in Basel with Basel in its name.",
|
|
||||||
"Can you book the Hilton Basel for me?",
|
|
||||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
|
||||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
|
||||||
]
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
runner = InMemoryRunner(app=app)
|
|
||||||
session = await runner.session_service.create_session(
|
|
||||||
app_name=app.name, user_id="test_user"
|
|
||||||
)
|
|
||||||
|
|
||||||
for query in queries:
|
|
||||||
print(f"\nUser: {query}")
|
|
||||||
user_message = Content(parts=[Part.from_text(text=query)])
|
|
||||||
|
|
||||||
async for event in runner.run_async(user_id="test_user", session_id=session.id, new_message=user_message):
|
|
||||||
if event.is_final_response() and event.content and event.content.parts:
|
|
||||||
print(f"Agent: {event.content.parts[0].text}")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
google-adk[toolbox]==1.23.0
|
google-adk==1.21.0
|
||||||
|
toolbox-core==0.5.4
|
||||||
pytest==9.0.2
|
pytest==9.0.2
|
||||||
@@ -41,29 +41,31 @@ def golden_keywords():
|
|||||||
class TestExecution:
|
class TestExecution:
|
||||||
"""Test framework execution and output validation."""
|
"""Test framework execution and output validation."""
|
||||||
|
|
||||||
_cached_output = None
|
|
||||||
|
|
||||||
@pytest.fixture(scope="function")
|
@pytest.fixture(scope="function")
|
||||||
def script_output(self, capsys):
|
def script_output(self, capsys):
|
||||||
"""Run the quickstart function and return its output."""
|
"""Run the quickstart function and return its output."""
|
||||||
if TestExecution._cached_output is None:
|
|
||||||
|
# TODO: Add better validation for ADK once we have a way to capture its
|
||||||
|
# output.
|
||||||
|
if ORCH_NAME == "adk":
|
||||||
|
return quickstart.app.root_agent.name
|
||||||
|
else:
|
||||||
asyncio.run(quickstart.main())
|
asyncio.run(quickstart.main())
|
||||||
out, err = capsys.readouterr()
|
|
||||||
TestExecution._cached_output = (out, err)
|
|
||||||
|
|
||||||
class Output:
|
return capsys.readouterr()
|
||||||
def __init__(self, out, err):
|
|
||||||
self.out = out
|
|
||||||
self.err = err
|
|
||||||
|
|
||||||
return Output(*TestExecution._cached_output)
|
|
||||||
|
|
||||||
def test_script_runs_without_errors(self, script_output):
|
def test_script_runs_without_errors(self, script_output):
|
||||||
"""Test that the script runs and produces no stderr."""
|
"""Test that the script runs and produces no stderr."""
|
||||||
|
if ORCH_NAME == "adk":
|
||||||
|
return
|
||||||
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
||||||
|
|
||||||
def test_keywords_in_output(self, script_output, golden_keywords):
|
def test_keywords_in_output(self, script_output, golden_keywords):
|
||||||
"""Test that expected keywords are present in the script's output."""
|
"""Test that expected keywords are present in the script's output."""
|
||||||
|
|
||||||
|
if ORCH_NAME == "adk":
|
||||||
|
assert script_output == "root_agent"
|
||||||
|
return
|
||||||
output = script_output.out
|
output = script_output.out
|
||||||
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
||||||
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
@@ -33,18 +33,17 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-pg-source
|
my-pg-source:
|
||||||
type: postgres
|
kind: postgres
|
||||||
host: 127.0.0.1
|
host: 127.0.0.1
|
||||||
port: 5432
|
port: 5432
|
||||||
database: toolbox_db
|
database: toolbox_db
|
||||||
user: toolbox_user
|
user: ${USER_NAME}
|
||||||
password: my-password
|
password: ${PASSWORD}
|
||||||
---
|
tools:
|
||||||
kind: tools
|
search-hotels-by-name:
|
||||||
name: search-hotels-by-name
|
kind: postgres-sql
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on name.
|
description: Search for hotels based on name.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -52,10 +51,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The name of the hotel.
|
description: The name of the hotel.
|
||||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||||
---
|
search-hotels-by-location:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: search-hotels-by-location
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Search for hotels based on location.
|
description: Search for hotels based on location.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -63,10 +60,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The location of the hotel.
|
description: The location of the hotel.
|
||||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||||
---
|
book-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: book-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: >-
|
description: >-
|
||||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||||
@@ -75,10 +70,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The ID of the hotel to book.
|
description: The ID of the hotel to book.
|
||||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||||
---
|
update-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: update-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: >-
|
description: >-
|
||||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||||
@@ -96,10 +89,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
statement: >-
|
statement: >-
|
||||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||||
as date) WHERE id = $1;
|
as date) WHERE id = $1;
|
||||||
---
|
cancel-hotel:
|
||||||
kind: tools
|
kind: postgres-sql
|
||||||
name: cancel-hotel
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-source
|
source: my-pg-source
|
||||||
description: Cancel a hotel by its ID.
|
description: Cancel a hotel by its ID.
|
||||||
parameters:
|
parameters:
|
||||||
@@ -107,10 +98,8 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
type: string
|
type: string
|
||||||
description: The ID of the hotel to cancel.
|
description: The ID of the hotel to cancel.
|
||||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||||
---
|
toolsets:
|
||||||
kind: toolsets
|
my-toolset:
|
||||||
name: my-toolset
|
|
||||||
tools:
|
|
||||||
- search-hotels-by-name
|
- search-hotels-by-name
|
||||||
- search-hotels-by-location
|
- search-hotels-by-location
|
||||||
- book-hotel
|
- book-hotel
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
|
|||||||
|
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -45,19 +45,19 @@ instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ to expose your developer assistant tools to a Postgres instance:
|
|||||||
|
|
||||||
{{< notice tip >}}
|
{{< notice tip >}}
|
||||||
This guide can be used with [AlloyDB
|
This guide can be used with [AlloyDB
|
||||||
Omni](https://cloud.google.com/alloydb/omni/docs/overview).
|
Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
## Set up the database
|
## Set up the database
|
||||||
@@ -40,10 +40,10 @@ Omni](https://cloud.google.com/alloydb/omni/docs/overview).
|
|||||||
1. Create or select a PostgreSQL instance.
|
1. Create or select a PostgreSQL instance.
|
||||||
|
|
||||||
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
|
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
|
||||||
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart)
|
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
|
||||||
|
|
||||||
1. Create or reuse [a database
|
1. Create or reuse [a database
|
||||||
user](https://docs.cloud.google.com/alloydb/omni/containers/current/docs/database-users/manage-users)
|
user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
|
||||||
and have the username and password ready.
|
and have the username and password ready.
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/docs/overview).
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ The native SDKs can be combined with MCP clients in many cases.
|
|||||||
|
|
||||||
Toolbox currently supports the following versions of MCP specification:
|
Toolbox currently supports the following versions of MCP specification:
|
||||||
|
|
||||||
* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
|
|
||||||
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
||||||
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
||||||
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
||||||
|
|||||||
@@ -46,10 +46,10 @@ with the necessary configuration for deployment to Vertex AI Agent Engine.
|
|||||||
process will generate deployment configuration files (like a `Makefile` and
|
process will generate deployment configuration files (like a `Makefile` and
|
||||||
`Dockerfile`) in your project directory.
|
`Dockerfile`) in your project directory.
|
||||||
|
|
||||||
4. Add `google-adk[toolbox]` as a dependency to the new project:
|
4. Add `toolbox-core` as a dependency to the new project:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
uv add google-adk[toolbox]
|
uv add toolbox-core
|
||||||
```
|
```
|
||||||
|
|
||||||
## Step 3: Configure Google Cloud Authentication
|
## Step 3: Configure Google Cloud Authentication
|
||||||
@@ -83,32 +83,34 @@ Toolbox instead of the local address.
|
|||||||
|
|
||||||
2. Open your agent file (`my_agent/agent.py`).
|
2. Open your agent file (`my_agent/agent.py`).
|
||||||
|
|
||||||
3. Update the `ToolboxToolset` initialization to point to your Cloud Run service URL. Replace the existing initialization code with the following:
|
3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
|
||||||
|
|
||||||
{{% alert color="info" title="Note" %}}
|
{{% alert color="info" %}}
|
||||||
Since Cloud Run services are secured by default, you also need to provide a workload identity.
|
Since Cloud Run services are secured by default, you also need to provide an
|
||||||
|
authentication token.
|
||||||
{{% /alert %}}
|
{{% /alert %}}
|
||||||
|
|
||||||
|
Replace your existing client initialization code with the following:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
from toolbox_core import ToolboxSyncClient, auth_methods
|
||||||
from toolbox_adk import CredentialStrategy
|
|
||||||
|
|
||||||
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
||||||
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
||||||
|
|
||||||
# Initialize the toolset with Workload Identity (generates ID token for the URL)
|
# Initialize the client with the Cloud Run URL and Auth headers
|
||||||
toolset = ToolboxToolset(
|
client = ToolboxSyncClient(
|
||||||
server_url=TOOLBOX_URL,
|
TOOLBOX_URL,
|
||||||
credentials=CredentialStrategy.workload_identity(target_audience=TOOLBOX_URL)
|
client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
|
||||||
)
|
)
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='root_agent',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=[toolset],
|
tools=client.load_toolset(),
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
@@ -129,14 +131,14 @@ app = App(root_agent=root_agent, name="my_agent")
|
|||||||
Run the deployment command:
|
Run the deployment command:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
make deploy
|
make backend
|
||||||
```
|
```
|
||||||
|
|
||||||
This command will build your agent's container image and deploy it to Vertex AI.
|
This command will build your agent's container image and deploy it to Vertex AI.
|
||||||
|
|
||||||
## Step 6: Test your Deployment
|
## Step 6: Test your Deployment
|
||||||
|
|
||||||
Once the deployment command (`make deploy`) completes, it will output the URL
|
Once the deployment command (`make backend`) completes, it will output the URL
|
||||||
for the Agent Engine Playground. You can click on this URL to open the
|
for the Agent Engine Playground. You can click on this URL to open the
|
||||||
Playground in your browser and start chatting with your agent to test the tools.
|
Playground in your browser and start chatting with your agent to test the tools.
|
||||||
|
|
||||||
|
|||||||
@@ -207,7 +207,6 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
|
|||||||
{{< tab header="Python" lang="python" >}}
|
{{< tab header="Python" lang="python" >}}
|
||||||
import asyncio
|
import asyncio
|
||||||
from toolbox_core import ToolboxClient, auth_methods
|
from toolbox_core import ToolboxClient, auth_methods
|
||||||
from toolbox_core.protocol import Protocol
|
|
||||||
|
|
||||||
# Replace with the Cloud Run service URL generated in the previous step
|
# Replace with the Cloud Run service URL generated in the previous step
|
||||||
URL = "https://cloud-run-url.app"
|
URL = "https://cloud-run-url.app"
|
||||||
@@ -218,7 +217,6 @@ async def main():
|
|||||||
async with ToolboxClient(
|
async with ToolboxClient(
|
||||||
URL,
|
URL,
|
||||||
client_headers={"Authorization": auth_token_provider},
|
client_headers={"Authorization": auth_token_provider},
|
||||||
protocol=Protocol.TOOLBOX,
|
|
||||||
) as toolbox:
|
) as toolbox:
|
||||||
toolset = await toolbox.load_toolset()
|
toolset = await toolbox.load_toolset()
|
||||||
# ...
|
# ...
|
||||||
@@ -283,5 +281,3 @@ contain the specific error message needed to diagnose the problem.
|
|||||||
Manager, it means the Toolbox service account is missing permissions.
|
Manager, it means the Toolbox service account is missing permissions.
|
||||||
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
||||||
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
||||||
|
|
||||||
- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.
|
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
---
|
|
||||||
title: "Generate Agent Skills"
|
|
||||||
type: docs
|
|
||||||
weight: 10
|
|
||||||
description: >
|
|
||||||
How to generate agent skills from a toolset.
|
|
||||||
---
|
|
||||||
|
|
||||||
The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset, complying with the [Agent Skill specification](https://agentskills.io/specification).
|
|
||||||
|
|
||||||
## Before you begin
|
|
||||||
|
|
||||||
1. Make sure you have the `toolbox` executable in your PATH.
|
|
||||||
2. Make sure you have [Node.js](https://nodejs.org/) installed on your system.
|
|
||||||
|
|
||||||
## Generating a Skill from a Toolset
|
|
||||||
|
|
||||||
A skill package consists of a `SKILL.md` file (with required YAML frontmatter) and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding script in the generated Node.js scripts (`.js`) that work across different platforms (Linux, macOS, Windows).
|
|
||||||
|
|
||||||
|
|
||||||
### Command Usage
|
|
||||||
|
|
||||||
The basic syntax for the command is:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox <tool-source> skills-generate \
|
|
||||||
--name <skill-name> \
|
|
||||||
--toolset <toolset-name> \
|
|
||||||
--description <description> \
|
|
||||||
--output-dir <output-directory>
|
|
||||||
```
|
|
||||||
|
|
||||||
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
|
|
||||||
- `--name`: Name of the generated skill.
|
|
||||||
- `--description`: Description of the generated skill.
|
|
||||||
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
|
|
||||||
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
|
|
||||||
|
|
||||||
{{< notice note >}}
|
|
||||||
**Note:** The `<skill-name>` must follow the Agent Skill [naming convention](https://agentskills.io/specification): it must contain only lowercase alphanumeric characters and hyphens, cannot start or end with a hyphen, and cannot contain consecutive hyphens (e.g., `my-skill`, `data-processing`).
|
|
||||||
{{< /notice >}}
|
|
||||||
|
|
||||||
### Example: Custom Tools File
|
|
||||||
|
|
||||||
1. Create a `tools.yaml` file with a toolset and some tools:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
tools:
|
|
||||||
tool_a:
|
|
||||||
description: "First tool"
|
|
||||||
run:
|
|
||||||
command: "echo 'Tool A'"
|
|
||||||
tool_b:
|
|
||||||
description: "Second tool"
|
|
||||||
run:
|
|
||||||
command: "echo 'Tool B'"
|
|
||||||
toolsets:
|
|
||||||
my_toolset:
|
|
||||||
tools:
|
|
||||||
- tool_a
|
|
||||||
- tool_b
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Generate the skill:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml skills-generate \
|
|
||||||
--name "my-skill" \
|
|
||||||
--toolset "my_toolset" \
|
|
||||||
--description "A skill containing multiple tools" \
|
|
||||||
--output-dir "generated-skills"
|
|
||||||
```
|
|
||||||
|
|
||||||
3. The generated skill directory structure:
|
|
||||||
|
|
||||||
```text
|
|
||||||
generated-skills/
|
|
||||||
└── my-skill/
|
|
||||||
├── SKILL.md
|
|
||||||
├── assets/
|
|
||||||
│ ├── tool_a.yaml
|
|
||||||
│ └── tool_b.yaml
|
|
||||||
└── scripts/
|
|
||||||
├── tool_a.js
|
|
||||||
└── tool_b.js
|
|
||||||
```
|
|
||||||
|
|
||||||
In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset.
|
|
||||||
|
|
||||||
### Example: Prebuilt Configuration
|
|
||||||
|
|
||||||
You can also generate skills from prebuilt toolsets:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --prebuilt alloydb-postgres-admin skills-generate \
|
|
||||||
--name "alloydb-postgres-admin" \
|
|
||||||
--description "skill for performing administrative operations on alloydb"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Installing the Generated Skill in Gemini CLI
|
|
||||||
|
|
||||||
Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command.
|
|
||||||
|
|
||||||
### Installation Command
|
|
||||||
|
|
||||||
Provide the path to the directory containing the generated skill:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
gemini skills install /path/to/generated-skills/my-skill
|
|
||||||
```
|
|
||||||
|
|
||||||
Alternatively, use ~/.gemini/skills as the `--output-dir` to generate the skill straight to the Gemini CLI.
|
|
||||||
@@ -1,75 +0,0 @@
|
|||||||
---
|
|
||||||
title: "Invoke Tools via CLI"
|
|
||||||
type: docs
|
|
||||||
weight: 10
|
|
||||||
description: >
|
|
||||||
Learn how to invoke your tools directly from the command line using the `invoke` command.
|
|
||||||
---
|
|
||||||
|
|
||||||
The `invoke` command allows you to invoke tools defined in your configuration directly from the CLI. This is useful for:
|
|
||||||
|
|
||||||
- **Ephemeral Invocation:** Executing a tool without spinning up a full MCP server/client.
|
|
||||||
- **Debugging:** Isolating tool execution logic and testing with various parameter combinations.
|
|
||||||
|
|
||||||
{{< notice tip >}}
|
|
||||||
**Keep configurations minimal:** The `invoke` command initializes *all* resources (sources, tools, etc.) defined in your configuration files during execution. To ensure fast response times, consider using a minimal configuration file containing only the tools you need for the specific invocation.
|
|
||||||
{{< /notice >}}
|
|
||||||
|
|
||||||
## Before you begin
|
|
||||||
|
|
||||||
1. Make sure you have the `toolbox` binary installed or built.
|
|
||||||
2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`).
|
|
||||||
|
|
||||||
### Command Usage
|
|
||||||
|
|
||||||
The basic syntax for the command is:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox <tool-source> invoke <tool-name> [params]
|
|
||||||
```
|
|
||||||
|
|
||||||
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
|
|
||||||
- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`.
|
|
||||||
- `[params]`: (Optional) A JSON string representing the arguments for the tool.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
### 1. Calling a Tool without Parameters
|
|
||||||
|
|
||||||
If your tool takes no parameters, simply provide the tool name:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke my-simple-tool
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Calling a Tool with Parameters
|
|
||||||
|
|
||||||
For tools that require arguments, pass them as a JSON string. Ensure you escape quotes correctly for your shell.
|
|
||||||
|
|
||||||
**Example: A tool that takes parameters**
|
|
||||||
|
|
||||||
Assuming a tool named `mytool` taking `a` and `b`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke mytool '{"a": 10, "b": 20}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Example: A tool that queries a database**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke db-query '{"sql": "SELECT * FROM users LIMIT 5"}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Using Prebuilt Configurations
|
|
||||||
|
|
||||||
You can also use the `--prebuilt` flag to load prebuilt toolsets.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --prebuilt cloudsql-postgres invoke cloudsql-postgres-list-instances
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
- **Tool not found:** Ensure the `<tool-name>` matches exactly what is in your YAML file and that the file is correctly loaded via `--tools-file`.
|
|
||||||
- **Invalid parameters:** Double-check your JSON syntax. The error message will usually indicate if the JSON parsing failed or if the parameters didn't match the tool's schema.
|
|
||||||
- **Auth errors:** The `invoke` command currently does not support flows requiring client-side authorization (like OAuth flow initiation via the CLI). It works best for tools using service-side authentication (e.g., Application Default Credentials).
|
|
||||||
@@ -16,7 +16,7 @@ description: >
|
|||||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||||
| | `--prebuilt` | Use one or more prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||||
@@ -27,53 +27,8 @@ description: >
|
|||||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
|
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
|
||||||
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
||||||
| | `--user-agent-metadata` | Appends additional metadata to the User-Agent. | |
|
|
||||||
| `-v` | `--version` | version for toolbox | |
|
| `-v` | `--version` | version for toolbox | |
|
||||||
|
|
||||||
## Sub Commands
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary><code>invoke</code></summary>
|
|
||||||
|
|
||||||
Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.
|
|
||||||
|
|
||||||
**Syntax:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox invoke <tool-name> [params]
|
|
||||||
```
|
|
||||||
|
|
||||||
**Arguments:**
|
|
||||||
|
|
||||||
- `tool-name`: The name of the tool to execute (as defined in your configuration).
|
|
||||||
- `params`: (Optional) A JSON string containing the parameters for the tool.
|
|
||||||
|
|
||||||
For more detailed instructions, see [Invoke Tools via CLI](../how-to/invoke_tool.md).
|
|
||||||
|
|
||||||
</details>
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary><code>skills-generate</code></summary>
|
|
||||||
|
|
||||||
Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill.
|
|
||||||
|
|
||||||
**Syntax:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox skills-generate --name <name> --description <description> --toolset <toolset> --output-dir <output>
|
|
||||||
```
|
|
||||||
|
|
||||||
**Flags:**
|
|
||||||
|
|
||||||
- `--name`: Name of the generated skill.
|
|
||||||
- `--description`: Description of the generated skill.
|
|
||||||
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
|
|
||||||
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
|
|
||||||
|
|
||||||
For more detailed instructions, see [Generate Agent Skills](../how-to/generate_skill.md).
|
|
||||||
|
|
||||||
</details>
|
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
### Transport Configuration
|
### Transport Configuration
|
||||||
@@ -95,11 +50,6 @@ For more detailed instructions, see [Generate Agent Skills](../how-to/generate_s
|
|||||||
|
|
||||||
# Server with prebuilt + custom tools configurations
|
# Server with prebuilt + custom tools configurations
|
||||||
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
||||||
|
|
||||||
# Server with multiple prebuilt tools configurations
|
|
||||||
./toolbox --prebuilt alloydb-postgres,alloydb-postgres-admin
|
|
||||||
# OR
|
|
||||||
./toolbox --prebuilt alloydb-postgres --prebuilt alloydb-postgres-admin
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Tool Configuration Sources
|
### Tool Configuration Sources
|
||||||
@@ -120,7 +70,7 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
|
|||||||
|
|
||||||
**Prebuilt Configurations:**
|
**Prebuilt Configurations:**
|
||||||
|
|
||||||
- `--prebuilt`: Use one or more predefined configurations for specific database types (e.g.,
|
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
|
||||||
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
||||||
Reference](prebuilt-tools.md) for allowed values.
|
Reference](prebuilt-tools.md) for allowed values.
|
||||||
|
|
||||||
|
|||||||
@@ -16,9 +16,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
|||||||
{{< notice tip >}}
|
{{< notice tip >}}
|
||||||
You can now use `--prebuilt` along `--tools-file`, `--tools-files`, or
|
You can now use `--prebuilt` along `--tools-file`, `--tools-files`, or
|
||||||
`--tools-folder` to combine prebuilt configs with custom tools.
|
`--tools-folder` to combine prebuilt configs with custom tools.
|
||||||
|
|
||||||
You can also combine multiple prebuilt configs.
|
|
||||||
|
|
||||||
See [Usage Examples](../reference/cli.md#examples).
|
See [Usage Examples](../reference/cli.md#examples).
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
@@ -100,43 +97,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
(timeseries metrics) for queries running in an AlloyDB instance using a
|
(timeseries metrics) for queries running in an AlloyDB instance using a
|
||||||
PromQL query.
|
PromQL query.
|
||||||
|
|
||||||
## AlloyDB Omni
|
|
||||||
|
|
||||||
* `--prebuilt` value: `alloydb-omni`
|
|
||||||
* **Environment Variables:**
|
|
||||||
* `ALLOYDB_OMNI_HOST`: (Optional) The hostname or IP address (Default: localhost).
|
|
||||||
* `ALLOYDB_OMNI_PORT`: (Optional) The port number (Default: 5432).
|
|
||||||
* `ALLOYDB_OMNI_DATABASE`: The name of the database to connect to.
|
|
||||||
* `ALLOYDB_OMNI_USER`: The database username.
|
|
||||||
* `ALLOYDB_OMNI_PASSWORD`: (Optional) The password for the database user.
|
|
||||||
* `ALLOYDB_OMNI_QUERY_PARAMS`: (Optional) Connection query parameters.
|
|
||||||
* **Tools:**
|
|
||||||
* `execute_sql`: Executes a SQL query.
|
|
||||||
* `list_tables`: Lists tables in the database.
|
|
||||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
|
||||||
database.
|
|
||||||
* `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations.
|
|
||||||
* `list_columnar_recommended_columns`: Lists columns that AlloyDB Omni recommends adding to the columnar engine.
|
|
||||||
* `list_memory_configurations`: Lists memory-related configurations in the
|
|
||||||
database.
|
|
||||||
* `list_top_bloated_tables`: List top bloated tables in the database.
|
|
||||||
* `list_replication_slots`: Lists replication slots in the database.
|
|
||||||
* `list_invalid_indexes`: Lists invalid indexes in the database.
|
|
||||||
* `get_query_plan`: Generate the execution plan of a statement.
|
|
||||||
* `list_views`: Lists views in the database from pg_views with a default
|
|
||||||
limit of 50 rows. Returns schemaname, viewname and the ownername.
|
|
||||||
* `list_schemas`: Lists schemas in the database.
|
|
||||||
* `database_overview`: Fetches the current state of the PostgreSQL server.
|
|
||||||
* `list_triggers`: Lists triggers in the database.
|
|
||||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
|
||||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
|
||||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
|
||||||
* `list_tablespaces`: Lists tablespaces in the database.
|
|
||||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
|
||||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
|
||||||
each database in the AlloyDB instance.
|
|
||||||
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
|
|
||||||
|
|
||||||
## BigQuery
|
## BigQuery
|
||||||
|
|
||||||
* `--prebuilt` value: `bigquery`
|
* `--prebuilt` value: `bigquery`
|
||||||
@@ -234,7 +194,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
|
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
|
||||||
@@ -246,7 +205,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
## Cloud SQL for PostgreSQL
|
## Cloud SQL for PostgreSQL
|
||||||
|
|
||||||
@@ -326,7 +284,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
|
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
|
||||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||||
@@ -337,7 +294,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
## Cloud SQL for SQL Server
|
## Cloud SQL for SQL Server
|
||||||
|
|
||||||
@@ -391,7 +347,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
|
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
|
||||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||||
@@ -402,7 +357,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
## Dataplex
|
## Dataplex
|
||||||
|
|
||||||
@@ -414,10 +368,10 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
entries.
|
entries.
|
||||||
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
|
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `search_entries`: Searches for entries in Dataplex Catalog.
|
* `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
|
||||||
* `lookup_entry`: Retrieves a specific entry from Dataplex
|
* `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex
|
||||||
Catalog.
|
Catalog.
|
||||||
* `search_aspect_types`: Finds aspect types relevant to the
|
* `dataplex_search_aspect_types`: Finds aspect types relevant to the
|
||||||
query.
|
query.
|
||||||
|
|
||||||
## Firestore
|
## Firestore
|
||||||
@@ -488,7 +442,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_project_file`: Create a new LookML file.
|
* `create_project_file`: Create a new LookML file.
|
||||||
* `update_project_file`: Update an existing LookML file.
|
* `update_project_file`: Update an existing LookML file.
|
||||||
* `delete_project_file`: Delete a LookML file.
|
* `delete_project_file`: Delete a LookML file.
|
||||||
* `validate_project`: Check the syntax of a LookML project.
|
|
||||||
* `get_connections`: Get the available connections in a Looker instance.
|
* `get_connections`: Get the available connections in a Looker instance.
|
||||||
* `get_connection_schemas`: Get the available schemas in a connection.
|
* `get_connection_schemas`: Get the available schemas in a connection.
|
||||||
* `get_connection_databases`: Get the available databases in a connection.
|
* `get_connection_databases`: Get the available databases in a connection.
|
||||||
|
|||||||
@@ -28,18 +28,16 @@ The following configurations are placed at the top level of a `tools.yaml` file.
|
|||||||
{{< notice tip >}}
|
{{< notice tip >}}
|
||||||
If you are accessing Toolbox with multiple applications, each
|
If you are accessing Toolbox with multiple applications, each
|
||||||
application should register their own Client ID even if they use the same
|
application should register their own Client ID even if they use the same
|
||||||
"type" of auth provider.
|
"kind" of auth provider.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: authServices
|
authServices:
|
||||||
name: my_auth_app_1
|
my_auth_app_1:
|
||||||
type: google
|
kind: google
|
||||||
clientId: ${YOUR_CLIENT_ID_1}
|
clientId: ${YOUR_CLIENT_ID_1}
|
||||||
---
|
my_auth_app_2:
|
||||||
kind: authServices
|
kind: google
|
||||||
name: my_auth_app_2
|
|
||||||
type: google
|
|
||||||
clientId: ${YOUR_CLIENT_ID_2}
|
clientId: ${YOUR_CLIENT_ID_2}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -40,9 +40,9 @@ id-token][provided-claims] can be used for the parameter.
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: authServices
|
authServices:
|
||||||
name: my-google-auth
|
my-google-auth:
|
||||||
type: google
|
kind: google
|
||||||
clientId: ${YOUR_GOOGLE_CLIENT_ID}
|
clientId: ${YOUR_GOOGLE_CLIENT_ID}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -55,5 +55,5 @@ instead of hardcoding your secrets into the configuration file.
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-----------|:--------:|:------------:|------------------------------------------------------------------|
|
|-----------|:--------:|:------------:|------------------------------------------------------------------|
|
||||||
| type | string | true | Must be "google". |
|
| kind | string | true | Must be "google". |
|
||||||
| clientId | string | true | Client ID of your application from registering your application. |
|
| clientId | string | true | Client ID of your application from registering your application. |
|
||||||
|
|||||||
@@ -3,14 +3,13 @@ title: "EmbeddingModels"
|
|||||||
type: docs
|
type: docs
|
||||||
weight: 2
|
weight: 2
|
||||||
description: >
|
description: >
|
||||||
EmbeddingModels represent services that transform text into vector embeddings
|
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
|
||||||
for semantic search.
|
|
||||||
---
|
---
|
||||||
|
|
||||||
EmbeddingModels represent services that generate vector representations of text
|
EmbeddingModels represent services that generate vector representations of text
|
||||||
data. In the MCP Toolbox, these models enable **Semantic Queries**, allowing
|
data. In the MCP Toolbox, these models enable **Semantic Queries**,
|
||||||
[Tools](../tools/) to automatically convert human-readable text into numerical
|
allowing [Tools](../tools/) to automatically convert human-readable text into
|
||||||
vectors before using them in a query.
|
numerical vectors before using them in a query.
|
||||||
|
|
||||||
This is primarily used in two scenarios:
|
This is primarily used in two scenarios:
|
||||||
|
|
||||||
@@ -20,33 +19,14 @@ This is primarily used in two scenarios:
|
|||||||
- **Semantic Search**: Converting a natural language query into a vector to
|
- **Semantic Search**: Converting a natural language query into a vector to
|
||||||
perform similarity searches.
|
perform similarity searches.
|
||||||
|
|
||||||
## Hidden Parameter Duplication (valueFromParam)
|
|
||||||
|
|
||||||
When building tools for vector ingestion, you often need the same input string
|
|
||||||
twice:
|
|
||||||
|
|
||||||
1. To store the original text in a TEXT column.
|
|
||||||
1. To generate the vector embedding for a VECTOR column.
|
|
||||||
|
|
||||||
Requesting an Agent (LLM) to output the exact same string twice is inefficient
|
|
||||||
and error-prone. The `valueFromParam` field solves this by allowing a parameter
|
|
||||||
to inherit its value from another parameter in the same tool.
|
|
||||||
|
|
||||||
### Key Behaviors
|
|
||||||
|
|
||||||
1. Hidden from Manifest: Parameters with valueFromParam set are excluded from
|
|
||||||
the tool definition sent to the Agent. The Agent does not know this parameter
|
|
||||||
exists.
|
|
||||||
1. Auto-Filled: When the tool is executed, the Toolbox automatically copies the
|
|
||||||
value from the referenced parameter before processing embeddings.
|
|
||||||
|
|
||||||
## Example
|
## Example
|
||||||
|
|
||||||
The following configuration defines an embedding model and applies it to
|
The following configuration defines an embedding model and applies it to
|
||||||
specific tool parameters.
|
specific tool parameters.
|
||||||
|
|
||||||
{{< notice tip >}} Use environment variable replacement with the format
|
{{< notice tip >}}
|
||||||
${ENV_NAME} instead of hardcoding your API keys into the configuration file.
|
Use environment variable replacement with the format ${ENV_NAME}
|
||||||
|
instead of hardcoding your API keys into the configuration file.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
### Step 1 - Define an Embedding Model
|
### Step 1 - Define an Embedding Model
|
||||||
@@ -54,24 +34,26 @@ ${ENV_NAME} instead of hardcoding your API keys into the configuration file.
|
|||||||
Define an embedding model in the `embeddingModels` section:
|
Define an embedding model in the `embeddingModels` section:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: embeddingModels
|
embeddingModels:
|
||||||
name: gemini-model # Name of the embedding model
|
gemini-model: # Name of the embedding model
|
||||||
type: gemini
|
kind: gemini
|
||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: ${GOOGLE_API_KEY}
|
apiKey: ${GOOGLE_API_KEY}
|
||||||
dimension: 768
|
dimension: 768
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Step 2 - Embed Tool Parameters
|
### Step 2 - Embed Tool Parameters
|
||||||
|
|
||||||
Use the defined embedding model, embed your query parameters using the
|
Use the defined embedding model, embed your query parameters using the
|
||||||
`embeddedBy` field. Only string-typed parameters can be embedded:
|
`embeddedBy` field. Only string-typed
|
||||||
|
parameters can be embedded:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
|
tools:
|
||||||
# Vector ingestion tool
|
# Vector ingestion tool
|
||||||
kind: tools
|
insert_embedding:
|
||||||
name: insert_embedding
|
kind: postgres-sql
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
statement: |
|
statement: |
|
||||||
INSERT INTO documents (content, embedding)
|
INSERT INTO documents (content, embedding)
|
||||||
@@ -79,18 +61,14 @@ statement: |
|
|||||||
parameters:
|
parameters:
|
||||||
- name: content
|
- name: content
|
||||||
type: string
|
type: string
|
||||||
description: The raw text content to be stored in the database.
|
|
||||||
- name: vector_string
|
- name: vector_string
|
||||||
type: string
|
type: string
|
||||||
# This parameter is hidden from the LLM.
|
description: The text to be vectorized and stored.
|
||||||
# It automatically copies the value from 'content' and embeds it.
|
embeddedBy: gemini-model # refers to the name of a defined embedding model
|
||||||
valueFromParam: content
|
|
||||||
embeddedBy: gemini-model
|
|
||||||
---
|
|
||||||
# Semantic search tool
|
# Semantic search tool
|
||||||
kind: tools
|
search_embedding:
|
||||||
name: search_embedding
|
kind: postgres-sql
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
statement: |
|
statement: |
|
||||||
SELECT id, content, embedding <-> $1 AS distance
|
SELECT id, content, embedding <-> $1 AS distance
|
||||||
|
|||||||
@@ -50,9 +50,9 @@ information.
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: embeddingModels
|
embeddingModels:
|
||||||
name: gemini-model
|
gemini-model:
|
||||||
type: gemini
|
kind: gemini
|
||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: ${GOOGLE_API_KEY}
|
apiKey: ${GOOGLE_API_KEY}
|
||||||
dimension: 768
|
dimension: 768
|
||||||
@@ -67,7 +67,7 @@ instead of hardcoding your secrets into the configuration file.
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-----------|:--------:|:------------:|--------------------------------------------------------------|
|
|-----------|:--------:|:------------:|--------------------------------------------------------------|
|
||||||
| type | string | true | Must be `gemini`. |
|
| kind | string | true | Must be `gemini`. |
|
||||||
| model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). |
|
| model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). |
|
||||||
| apiKey | string | false | Your API Key from Google AI Studio. |
|
| apiKey | string | false | Your API Key from Google AI Studio. |
|
||||||
| dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). |
|
| dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). |
|
||||||
|
|||||||
@@ -16,8 +16,8 @@ can be sent to a Large Language Model (LLM). The Toolbox server implements the
|
|||||||
specification, allowing clients to discover and retrieve these prompts.
|
specification, allowing clients to discover and retrieve these prompts.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: prompts
|
prompts:
|
||||||
name: code_review
|
code_review:
|
||||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||||
messages:
|
messages:
|
||||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||||
@@ -31,7 +31,7 @@ arguments:
|
|||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||||
| description | string | No | A brief explanation of what the prompt does. |
|
| description | string | No | A brief explanation of what the prompt does. |
|
||||||
| type | string | No | The type of prompt. Defaults to `"custom"`. |
|
| kind | string | No | The kind of prompt. Defaults to `"custom"`. |
|
||||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ Here is an example of a simple prompt that takes a single argument, code, and
|
|||||||
asks an LLM to review it.
|
asks an LLM to review it.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: prompts
|
prompts:
|
||||||
name: code_review
|
code_review:
|
||||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||||
messages:
|
messages:
|
||||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||||
@@ -33,8 +33,8 @@ You can define prompts with multiple messages to set up more complex
|
|||||||
conversational contexts, like a role-playing scenario.
|
conversational contexts, like a role-playing scenario.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: prompts
|
prompts:
|
||||||
name: roleplay_scenario
|
roleplay_scenario:
|
||||||
description: "Sets up a roleplaying scenario with initial messages."
|
description: "Sets up a roleplaying scenario with initial messages."
|
||||||
arguments:
|
arguments:
|
||||||
- name: "character"
|
- name: "character"
|
||||||
@@ -54,7 +54,7 @@ messages:
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||||
| type | string | No | The type of prompt. Must be `"custom"`. |
|
| kind | string | No | The kind of prompt. Must be `"custom"`. |
|
||||||
| description | string | No | A brief explanation of what the prompt does. |
|
| description | string | No | A brief explanation of what the prompt does. |
|
||||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||||
|
|||||||
@@ -17,9 +17,9 @@ instead of hardcoding your secrets into the configuration file.
|
|||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-cloud-sql-source
|
my-cloud-sql-source:
|
||||||
type: cloud-sql-postgres
|
kind: cloud-sql-postgres
|
||||||
project: my-project-id
|
project: my-project-id
|
||||||
region: us-central1
|
region: us-central1
|
||||||
instance: my-instance-name
|
instance: my-instance-name
|
||||||
|
|||||||
@@ -25,13 +25,12 @@ Authentication can be handled in two ways:
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-alloydb-admin
|
my-alloydb-admin:
|
||||||
type: alloydb-admin
|
kind: alloy-admin
|
||||||
---
|
|
||||||
kind: sources
|
my-oauth-alloydb-admin:
|
||||||
name: my-oauth-alloydb-admin
|
kind: alloydb-admin
|
||||||
type: alloydb-admin
|
|
||||||
useClientOAuth: true
|
useClientOAuth: true
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -39,6 +38,6 @@ useClientOAuth: true
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- |
|
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| type | string | true | Must be "alloydb-admin". |
|
| kind | string | true | Must be "alloydb-admin". |
|
||||||
| defaultProject | string | false | The Google Cloud project ID to use for AlloyDB infrastructure tools. |
|
| defaultProject | string | false | The Google Cloud project ID to use for AlloyDB infrastructure tools. |
|
||||||
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
|
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
|
||||||
|
|||||||
@@ -176,9 +176,9 @@ To connect using IAM authentication:
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-alloydb-pg-source
|
my-alloydb-pg-source:
|
||||||
type: alloydb-postgres
|
kind: alloydb-postgres
|
||||||
project: my-project-id
|
project: my-project-id
|
||||||
region: us-central1
|
region: us-central1
|
||||||
cluster: my-cluster
|
cluster: my-cluster
|
||||||
@@ -194,20 +194,11 @@ Use environment variable replacement with the format ${ENV_NAME}
|
|||||||
instead of hardcoding your secrets into the configuration file.
|
instead of hardcoding your secrets into the configuration file.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
### Managed Connection Pooling
|
|
||||||
|
|
||||||
Toolbox automatically supports [Managed Connection Pooling][alloydb-mcp]. If your AlloyDB instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
|
|
||||||
|
|
||||||
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [AlloyDB Managed Connection Pooling documentation][alloydb-mcp-docs].
|
|
||||||
|
|
||||||
[alloydb-mcp]: https://cloud.google.com/blog/products/databases/alloydb-managed-connection-pooling
|
|
||||||
[alloydb-mcp-docs]: https://cloud.google.com/alloydb/docs/configure-managed-connection-pooling
|
|
||||||
|
|
||||||
## Reference
|
## Reference
|
||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
|
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
|
||||||
| type | string | true | Must be "alloydb-postgres". |
|
| kind | string | true | Must be "alloydb-postgres". |
|
||||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||||
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
||||||
| cluster | string | true | Name of the AlloyDB cluster (e.g. "my-cluster"). |
|
| cluster | string | true | Name of the AlloyDB cluster (e.g. "my-cluster"). |
|
||||||
|
|||||||
@@ -121,9 +121,9 @@ identity used has been granted the correct IAM permissions.
|
|||||||
Initialize a BigQuery source that uses ADC:
|
Initialize a BigQuery source that uses ADC:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-bigquery-source
|
my-bigquery-source:
|
||||||
type: "bigquery"
|
kind: "bigquery"
|
||||||
project: "my-project-id"
|
project: "my-project-id"
|
||||||
# location: "US" # Optional: Specifies the location for query jobs.
|
# location: "US" # Optional: Specifies the location for query jobs.
|
||||||
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
||||||
@@ -140,9 +140,9 @@ project: "my-project-id"
|
|||||||
Initialize a BigQuery source that uses the client's access token:
|
Initialize a BigQuery source that uses the client's access token:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-bigquery-client-auth-source
|
my-bigquery-client-auth-source:
|
||||||
type: "bigquery"
|
kind: "bigquery"
|
||||||
project: "my-project-id"
|
project: "my-project-id"
|
||||||
useClientOAuth: true
|
useClientOAuth: true
|
||||||
# location: "US" # Optional: Specifies the location for query jobs.
|
# location: "US" # Optional: Specifies the location for query jobs.
|
||||||
@@ -161,7 +161,7 @@ useClientOAuth: true
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|---------------------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|---------------------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| type | string | true | Must be "bigquery". |
|
| kind | string | true | Must be "bigquery". |
|
||||||
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
|
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
|
||||||
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
|
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
|
||||||
| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
|
| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
|
||||||
|
|||||||
@@ -59,9 +59,9 @@ applying IAM permissions and roles to an identity.
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-bigtable-source
|
my-bigtable-source:
|
||||||
type: "bigtable"
|
kind: "bigtable"
|
||||||
project: "my-project-id"
|
project: "my-project-id"
|
||||||
instance: "test-instance"
|
instance: "test-instance"
|
||||||
```
|
```
|
||||||
@@ -70,6 +70,6 @@ instance: "test-instance"
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
|
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
|
||||||
| type | string | true | Must be "bigtable". |
|
| kind | string | true | Must be "bigtable". |
|
||||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||||
| instance | string | true | Name of the Bigtable instance. |
|
| instance | string | true | Name of the Bigtable instance. |
|
||||||
|
|||||||
@@ -23,9 +23,9 @@ distributed architectures, and a flexible approach to schema definition.
|
|||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
sources:
|
||||||
name: my-cassandra-source
|
my-cassandra-source:
|
||||||
type: cassandra
|
kind: cassandra
|
||||||
hosts:
|
hosts:
|
||||||
- 127.0.0.1
|
- 127.0.0.1
|
||||||
keyspace: my_keyspace
|
keyspace: my_keyspace
|
||||||
@@ -47,7 +47,7 @@ instead of hardcoding your secrets into the configuration file.
|
|||||||
|
|
||||||
| **field** | **type** | **required** | **description** |
|
| **field** | **type** | **required** | **description** |
|
||||||
|------------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------|
|
|------------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| type | string | true | Must be "cassandra". |
|
| kind | string | true | Must be "cassandra". |
|
||||||
| hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. |
|
| hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. |
|
||||||
| keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). |
|
| keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). |
|
||||||
| protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). |
|
| protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). |
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user