diff --git a/.ci/integration.cloudbuild.yaml b/.ci/integration.cloudbuild.yaml index 604be42499..96bbbd8b9b 100644 --- a/.ci/integration.cloudbuild.yaml +++ b/.ci/integration.cloudbuild.yaml @@ -171,6 +171,23 @@ steps: alloydbainl \ alloydbainl + - id: "alloydb-omni" + name: golang:1 + waitFor: ["compile-test-binary"] + entrypoint: /bin/bash + env: + - "GOPATH=/gopath" + volumes: + - name: "go" + path: "/gopath" + args: + - -c + - | + .ci/test_with_coverage.sh \ + "AlloyDB Omni" \ + alloydbomni \ + postgres + - id: "bigtable" name: golang:1 waitFor: ["compile-test-binary"] @@ -295,6 +312,25 @@ steps: cloudhealthcare \ cloudhealthcare + - id: "cloud-logging-admin" + name: golang:1 + waitFor: ["compile-test-binary"] + entrypoint: /bin/bash + env: + - "GOPATH=/gopath" + - "LOGADMIN_PROJECT=$PROJECT_ID" + secretEnv: ["CLIENT_ID"] + volumes: + - name: "go" + path: "/gopath" + args: + - -c + - | + .ci/test_with_coverage.sh \ + "Cloud Logging Admin" \ + cloudloggingadmin \ + cloudloggingadmin + - id: "postgres" name: golang:1 waitFor: ["compile-test-binary"] @@ -318,6 +354,30 @@ steps: postgressql \ postgresexecutesql + - id: "cockroachdb" + name: golang:1 + waitFor: ["compile-test-binary"] + entrypoint: /bin/bash + env: + - "GOPATH=/gopath" + - "COCKROACHDB_DATABASE=$_DATABASE_NAME" + - "COCKROACHDB_PORT=$_COCKROACHDB_PORT" + - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL" + secretEnv: ["COCKROACHDB_USER", "COCKROACHDB_HOST","CLIENT_ID"] + volumes: + - name: "go" + path: "/gopath" + args: + - -c + - | + .ci/test_with_coverage.sh \ + "CockroachDB" \ + cockroachdb \ + cockroachdbsql \ + cockroachdbexecutesql \ + cockroachdblisttables \ + cockroachdblistschemas + - id: "spanner" name: golang:1 waitFor: ["compile-test-binary"] @@ -657,7 +717,7 @@ steps: "Looker" \ looker \ looker - + - id: "mindsdb" name: golang:1 waitFor: ["compile-test-binary"] @@ -845,7 +905,7 @@ steps: "Snowflake" \ snowflake \ snowflake - + - id: "cassandra" name: golang:1 waitFor: 
["compile-test-binary"] @@ -887,16 +947,16 @@ steps: tar -C /usr/local -xzf go.tar.gz export PATH="/usr/local/go/bin:$$PATH" - go test -v ./internal/sources/oracle/... \ + go test -v ./tests/oracle/... \ -coverprofile=oracle_coverage.out \ -coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/... - + # Coverage check total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}') echo "Oracle total coverage: $total_coverage" coverage_numeric=$(echo "$total_coverage" | sed 's/%//') - if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then - echo "Coverage failure: $total_coverage is below 20%." + if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 60)}'; then + echo "Coverage failure: $total_coverage is below 60%." exit 1 fi @@ -1093,6 +1153,11 @@ availableSecrets: env: MARIADB_HOST - versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest env: MONGODB_URI + - versionName: projects/$PROJECT_ID/secrets/cockroachdb_user/versions/latest + env: COCKROACHDB_USER + - versionName: projects/$PROJECT_ID/secrets/cockroachdb_host/versions/latest + env: COCKROACHDB_HOST + options: logging: CLOUD_LOGGING_ONLY @@ -1153,6 +1218,9 @@ substitutions: _SINGLESTORE_PORT: "3308" _SINGLESTORE_DATABASE: "singlestore" _SINGLESTORE_USER: "root" + _COCKROACHDB_HOST: 127.0.0.1 + _COCKROACHDB_PORT: "26257" + _COCKROACHDB_USER: "root" _MARIADB_PORT: "3307" _MARIADB_DATABASE: test_database _SNOWFLAKE_DATABASE: "test" diff --git a/.ci/quickstart_test/run_go_tests.sh b/.ci/quickstart_test/run_go_tests.sh deleted file mode 100644 index 43874931b2..0000000000 --- a/.ci/quickstart_test/run_go_tests.sh +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#!/bin/bash - -set -e - -TABLE_NAME="hotels_go" -QUICKSTART_GO_DIR="docs/en/getting-started/quickstart/go" -SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql" - -PROXY_PID="" -TOOLBOX_PID="" - -install_system_packages() { - apt-get update && apt-get install -y \ - postgresql-client \ - wget \ - gettext-base \ - netcat-openbsd -} - -start_cloud_sql_proxy() { - wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy - chmod +x /usr/local/bin/cloud-sql-proxy - cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" & - PROXY_PID=$! - - for i in {1..30}; do - if nc -z 127.0.0.1 5432; then - echo "Cloud SQL Proxy is up and running." - return - fi - sleep 1 - done - - echo "Cloud SQL Proxy failed to start within the timeout period." - exit 1 -} - -setup_toolbox() { - TOOLBOX_YAML="/tools.yaml" - echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML" - if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi - wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox" - chmod +x "/toolbox" - /toolbox --tools-file "$TOOLBOX_YAML" & - TOOLBOX_PID=$! - sleep 2 -} - -setup_orch_table() { - export TABLE_NAME - envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME" -} - -run_orch_test() { - local orch_dir="$1" - local orch_name - orch_name=$(basename "$orch_dir") - - if [ "$orch_name" == "openAI" ]; then - echo -e "\nSkipping framework '${orch_name}': Temporarily excluded." 
- return - fi - - ( - set -e - setup_orch_table - - echo "--- Preparing module for $orch_name ---" - cd "$orch_dir" - - if [ -f "go.mod" ]; then - go mod tidy - fi - - cd .. - - export ORCH_NAME="$orch_name" - - echo "--- Running tests for $orch_name ---" - go test -v ./... - ) -} - -cleanup_all() { - echo "--- Final cleanup: Shutting down processes and dropping table ---" - if [ -n "$TOOLBOX_PID" ]; then - kill $TOOLBOX_PID || true - fi - if [ -n "$PROXY_PID" ]; then - kill $PROXY_PID || true - fi -} -trap cleanup_all EXIT - -# Main script execution -install_system_packages -start_cloud_sql_proxy - -export PGHOST=127.0.0.1 -export PGPORT=5432 -export PGPASSWORD="$DB_PASSWORD" -export GOOGLE_API_KEY="$GOOGLE_API_KEY" - -setup_toolbox - -for ORCH_DIR in "$QUICKSTART_GO_DIR"/*/; do - if [ ! -d "$ORCH_DIR" ]; then - continue - fi - run_orch_test "$ORCH_DIR" -done diff --git a/.ci/quickstart_test/run_js_tests.sh b/.ci/quickstart_test/run_js_tests.sh deleted file mode 100644 index 71816542f5..0000000000 --- a/.ci/quickstart_test/run_js_tests.sh +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -#!/bin/bash - -set -e - -TABLE_NAME="hotels_js" -QUICKSTART_JS_DIR="docs/en/getting-started/quickstart/js" -SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql" - -# Initialize process IDs to empty at the top of the script -PROXY_PID="" -TOOLBOX_PID="" - -install_system_packages() { - apt-get update && apt-get install -y \ - postgresql-client \ - wget \ - gettext-base \ - netcat-openbsd -} - -start_cloud_sql_proxy() { - wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy - chmod +x /usr/local/bin/cloud-sql-proxy - cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" & - PROXY_PID=$! - - for i in {1..30}; do - if nc -z 127.0.0.1 5432; then - echo "Cloud SQL Proxy is up and running." - return - fi - sleep 1 - done - - echo "Cloud SQL Proxy failed to start within the timeout period." - exit 1 -} - -setup_toolbox() { - TOOLBOX_YAML="/tools.yaml" - echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML" - if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi - wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox" - chmod +x "/toolbox" - /toolbox --tools-file "$TOOLBOX_YAML" & - TOOLBOX_PID=$! - sleep 2 -} - -setup_orch_table() { - export TABLE_NAME - envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME" -} - -run_orch_test() { - local orch_dir="$1" - local orch_name - orch_name=$(basename "$orch_dir") - - ( - set -e - echo "--- Preparing environment for $orch_name ---" - setup_orch_table - - cd "$orch_dir" - echo "Installing dependencies for $orch_name..." - if [ -f "package-lock.json" ]; then - npm ci - else - npm install - fi - - cd .. 
- - echo "--- Running tests for $orch_name ---" - export ORCH_NAME="$orch_name" - node --test quickstart.test.js - - echo "--- Cleaning environment for $orch_name ---" - rm -rf "${orch_name}/node_modules" - ) -} - -cleanup_all() { - echo "--- Final cleanup: Shutting down processes and dropping table ---" - if [ -n "$TOOLBOX_PID" ]; then - kill $TOOLBOX_PID || true - fi - if [ -n "$PROXY_PID" ]; then - kill $PROXY_PID || true - fi -} -trap cleanup_all EXIT - -# Main script execution -install_system_packages -start_cloud_sql_proxy - -export PGHOST=127.0.0.1 -export PGPORT=5432 -export PGPASSWORD="$DB_PASSWORD" -export GOOGLE_API_KEY="$GOOGLE_API_KEY" - -setup_toolbox - -for ORCH_DIR in "$QUICKSTART_JS_DIR"/*/; do - if [ ! -d "$ORCH_DIR" ]; then - continue - fi - run_orch_test "$ORCH_DIR" -done diff --git a/.ci/quickstart_test/run_py_tests.sh b/.ci/quickstart_test/run_py_tests.sh deleted file mode 100644 index 3d559838d9..0000000000 --- a/.ci/quickstart_test/run_py_tests.sh +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -#!/bin/bash - -set -e - -TABLE_NAME="hotels_python" -QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python" -SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql" - -PROXY_PID="" -TOOLBOX_PID="" - -install_system_packages() { - apt-get update && apt-get install -y \ - postgresql-client \ - python3-venv \ - wget \ - gettext-base \ - netcat-openbsd -} - -start_cloud_sql_proxy() { - wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy - chmod +x /usr/local/bin/cloud-sql-proxy - cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" & - PROXY_PID=$! - - for i in {1..30}; do - if nc -z 127.0.0.1 5432; then - echo "Cloud SQL Proxy is up and running." - return - fi - sleep 1 - done - - echo "Cloud SQL Proxy failed to start within the timeout period." - exit 1 -} - -setup_toolbox() { - TOOLBOX_YAML="/tools.yaml" - echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML" - if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi - wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox" - chmod +x "/toolbox" - /toolbox --tools-file "$TOOLBOX_YAML" & - TOOLBOX_PID=$! - sleep 2 -} - -setup_orch_table() { - export TABLE_NAME - envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME" -} - -run_orch_test() { - local orch_dir="$1" - local orch_name - orch_name=$(basename "$orch_dir") - ( - set -e - setup_orch_table - cd "$orch_dir" - local VENV_DIR=".venv" - python3 -m venv "$VENV_DIR" - source "$VENV_DIR/bin/activate" - pip install -r requirements.txt - echo "--- Running tests for $orch_name ---" - cd .. 
- ORCH_NAME="$orch_name" pytest - rm -rf "$VENV_DIR" - ) -} - -cleanup_all() { - echo "--- Final cleanup: Shutting down processes and dropping table ---" - if [ -n "$TOOLBOX_PID" ]; then - kill $TOOLBOX_PID || true - fi - if [ -n "$PROXY_PID" ]; then - kill $PROXY_PID || true - fi -} -trap cleanup_all EXIT - -# Main script execution -install_system_packages -start_cloud_sql_proxy - -export PGHOST=127.0.0.1 -export PGPORT=5432 -export PGPASSWORD="$DB_PASSWORD" -export GOOGLE_API_KEY="$GOOGLE_API_KEY" - -setup_toolbox - -for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do - if [ ! -d "$ORCH_DIR" ]; then - continue - fi - run_orch_test "$ORCH_DIR" -done diff --git a/.ci/sample_tests/pre_post_processing/py.integration.cloudbuild.yaml b/.ci/sample_tests/pre_post_processing/py.integration.cloudbuild.yaml new file mode 100644 index 0000000000..5844226428 --- /dev/null +++ b/.ci/sample_tests/pre_post_processing/py.integration.cloudbuild.yaml @@ -0,0 +1,57 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +steps: + - name: "${_IMAGE}" + id: "py-pre-post-processing-test" + entrypoint: "bash" + args: + - -c + - | + set -ex + chmod +x .ci/sample_tests/run_tests.sh + .ci/sample_tests/run_tests.sh + env: + - "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}" + - "GCP_PROJECT=${_GCP_PROJECT}" + - "DATABASE_NAME=${_DATABASE_NAME}" + - "DB_USER=${_DB_USER}" + - "TARGET_ROOT=${_TARGET_ROOT}" + - "TARGET_LANG=${_TARGET_LANG}" + - "TABLE_NAME=${_TABLE_NAME}" + - "SQL_FILE=${_SQL_FILE}" + - "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}" + secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"] + +availableSecrets: + secretManager: + - versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5 + env: "TOOLS_YAML_CONTENT" + - versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest + env: "GOOGLE_API_KEY" + - versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest + env: "DB_PASSWORD" + +timeout: 1200s + +substitutions: + _TARGET_LANG: "python" + _IMAGE: "gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0" + _TARGET_ROOT: "docs/en/samples/pre_post_processing/python" + _TABLE_NAME: "hotels_py_pre_post_processing" + _SQL_FILE: ".ci/sample_tests/setup_hotels.sql" + _AGENT_FILE_PATTERN: "agent.py" + +options: + logging: CLOUD_LOGGING_ONLY \ No newline at end of file diff --git a/.ci/quickstart_test/go.integration.cloudbuild.yaml b/.ci/sample_tests/quickstart/go.integration.cloudbuild.yaml similarity index 84% rename from .ci/quickstart_test/go.integration.cloudbuild.yaml rename to .ci/sample_tests/quickstart/go.integration.cloudbuild.yaml index cf9128ce60..24b14bfc26 100644 --- a/.ci/quickstart_test/go.integration.cloudbuild.yaml +++ b/.ci/sample_tests/quickstart/go.integration.cloudbuild.yaml @@ -23,13 +23,18 @@ steps: - | set -ex export VERSION=$(cat ./cmd/version.txt) - chmod +x .ci/quickstart_test/run_go_tests.sh - .ci/quickstart_test/run_go_tests.sh + chmod +x .ci/sample_tests/run_tests.sh + 
.ci/sample_tests/run_tests.sh env: - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}' - 'GCP_PROJECT=${_GCP_PROJECT}' - 'DATABASE_NAME=${_DATABASE_NAME}' - 'DB_USER=${_DB_USER}' + - 'TARGET_ROOT=docs/en/getting-started/quickstart/go' + - 'TARGET_LANG=go' + - 'TABLE_NAME=hotels_go' + - 'SQL_FILE=.ci/sample_tests/setup_hotels.sql' + - 'AGENT_FILE_PATTERN=quickstart.go' secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD'] availableSecrets: diff --git a/.ci/quickstart_test/js.integration.cloudbuild.yaml b/.ci/sample_tests/quickstart/js.integration.cloudbuild.yaml similarity index 84% rename from .ci/quickstart_test/js.integration.cloudbuild.yaml rename to .ci/sample_tests/quickstart/js.integration.cloudbuild.yaml index cbf4e8547f..6e5aac6b07 100644 --- a/.ci/quickstart_test/js.integration.cloudbuild.yaml +++ b/.ci/sample_tests/quickstart/js.integration.cloudbuild.yaml @@ -23,13 +23,18 @@ steps: - | set -ex export VERSION=$(cat ./cmd/version.txt) - chmod +x .ci/quickstart_test/run_js_tests.sh - .ci/quickstart_test/run_js_tests.sh + chmod +x .ci/sample_tests/run_tests.sh + .ci/sample_tests/run_tests.sh env: - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}' - 'GCP_PROJECT=${_GCP_PROJECT}' - 'DATABASE_NAME=${_DATABASE_NAME}' - 'DB_USER=${_DB_USER}' + - 'TARGET_ROOT=docs/en/getting-started/quickstart/js' + - 'TARGET_LANG=js' + - 'TABLE_NAME=hotels_js' + - 'SQL_FILE=.ci/sample_tests/setup_hotels.sql' + - 'AGENT_FILE_PATTERN=quickstart.js' secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD'] availableSecrets: diff --git a/.ci/quickstart_test/py.integration.cloudbuild.yaml b/.ci/sample_tests/quickstart/py.integration.cloudbuild.yaml similarity index 84% rename from .ci/quickstart_test/py.integration.cloudbuild.yaml rename to .ci/sample_tests/quickstart/py.integration.cloudbuild.yaml index 8fd3834d6c..da8daf678f 100644 --- a/.ci/quickstart_test/py.integration.cloudbuild.yaml +++ b/.ci/sample_tests/quickstart/py.integration.cloudbuild.yaml @@ -23,13 +23,18 @@ 
steps: - | set -ex export VERSION=$(cat ./cmd/version.txt) - chmod +x .ci/quickstart_test/run_py_tests.sh - .ci/quickstart_test/run_py_tests.sh + chmod +x .ci/sample_tests/run_tests.sh + .ci/sample_tests/run_tests.sh env: - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}' - 'GCP_PROJECT=${_GCP_PROJECT}' - 'DATABASE_NAME=${_DATABASE_NAME}' - 'DB_USER=${_DB_USER}' + - 'TARGET_ROOT=docs/en/getting-started/quickstart/python' + - 'TARGET_LANG=python' + - 'TABLE_NAME=hotels_python' + - 'SQL_FILE=.ci/sample_tests/setup_hotels.sql' + - 'AGENT_FILE_PATTERN=quickstart.py' secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD'] availableSecrets: diff --git a/.ci/sample_tests/run_tests.sh b/.ci/sample_tests/run_tests.sh new file mode 100644 index 0000000000..5de9234eb0 --- /dev/null +++ b/.ci/sample_tests/run_tests.sh @@ -0,0 +1,202 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -e + +# --- Configuration (from Environment Variables) --- +# TARGET_ROOT: The directory to search for tests (e.g., docs/en/getting-started/quickstart/js) +# TARGET_LANG: python, js, go +# TABLE_NAME: Database table name to use +# SQL_FILE: Path to the SQL setup file +# AGENT_FILE_PATTERN: Filename to look for (e.g., quickstart.js or agent.py) + +VERSION=$(cat ./cmd/version.txt) + +# Process IDs & Logs +PROXY_PID="" +TOOLBOX_PID="" +PROXY_LOG="cloud_sql_proxy.log" +TOOLBOX_LOG="toolbox_server.log" + +install_system_packages() { + echo "Installing system packages..." + apt-get update && apt-get install -y \ + postgresql-client \ + wget \ + gettext-base \ + netcat-openbsd + + if [[ "$TARGET_LANG" == "python" ]]; then + apt-get install -y python3-venv + fi +} + +start_cloud_sql_proxy() { + echo "Starting Cloud SQL Proxy..." + wget -q "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy + chmod +x /usr/local/bin/cloud-sql-proxy + cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" > "$PROXY_LOG" 2>&1 & + PROXY_PID=$! + + # Health Check + for i in {1..30}; do + if nc -z 127.0.0.1 5432; then + echo "Cloud SQL Proxy is up and running." + return + fi + sleep 1 + done + echo "ERROR: Cloud SQL Proxy failed to start. Logs:" + cat "$PROXY_LOG" + exit 1 +} + +setup_toolbox() { + echo "Setting up Toolbox server..." + TOOLBOX_YAML="/tools.yaml" + echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML" + wget -q "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox" + chmod +x "/toolbox" + /toolbox --tools-file "$TOOLBOX_YAML" > "$TOOLBOX_LOG" 2>&1 & + TOOLBOX_PID=$! + + # Health Check + for i in {1..15}; do + if nc -z 127.0.0.1 5000; then + echo "Toolbox server is up and running." + return + fi + sleep 1 + done + echo "ERROR: Toolbox server failed to start. 
Logs:" + cat "$TOOLBOX_LOG" + exit 1 +} + +setup_db_table() { + echo "Setting up database table $TABLE_NAME using $SQL_FILE..." + export TABLE_NAME + envsubst < "$SQL_FILE" | psql -h 127.0.0.1 -p 5432 -U "$DB_USER" -d "$DATABASE_NAME" +} + +run_python_test() { + local dir=$1 + local name=$(basename "$dir") + echo "--- Running Python Test: $name ---" + ( + cd "$dir" + python3 -m venv .venv + source .venv/bin/activate + pip install -q -r requirements.txt pytest + + cd .. + local test_file=$(find . -maxdepth 1 -name "*test.py" | head -n 1) + if [ -n "$test_file" ]; then + echo "Found native test: $test_file. Running pytest..." + export ORCH_NAME="$name" + export PYTHONPATH="../" + pytest "$test_file" + else + echo "No native test found. running agent directly..." + export PYTHONPATH="../" + python3 "${name}/${AGENT_FILE_PATTERN}" + fi + rm -rf "${name}/.venv" + ) +} + +run_js_test() { + local dir=$1 + local name=$(basename "$dir") + echo "--- Running JS Test: $name ---" + ( + cd "$dir" + if [ -f "package-lock.json" ]; then npm ci -q; else npm install -q; fi + + cd .. + # Looking for a JS test file in the parent directory + local test_file=$(find . -maxdepth 1 -name "*test.js" | head -n 1) + if [ -n "$test_file" ]; then + echo "Found native test: $test_file. Running node --test..." + export ORCH_NAME="$name" + node --test "$test_file" + else + echo "No native test found. running agent directly..." + node "${name}/${AGENT_FILE_PATTERN}" + fi + rm -rf "${name}/node_modules" + ) +} + +run_go_test() { + local dir=$1 + local name=$(basename "$dir") + + if [ "$name" == "openAI" ]; then + echo -e "\nSkipping framework '${name}': Temporarily excluded." + return + fi + + echo "--- Running Go Test: $name ---" + ( + cd "$dir" + if [ -f "go.mod" ]; then + go mod tidy + fi + + cd .. + local test_file=$(find . -maxdepth 1 -name "*test.go" | head -n 1) + if [ -n "$test_file" ]; then + echo "Found native test: $test_file. Running go test..." 
+ export ORCH_NAME="$name" + go test -v ./... + else + echo "No native test found. running agent directly..." + cd "$name" + go run "." + fi + ) +} + +cleanup() { + echo "Cleaning up background processes..." + [ -n "$TOOLBOX_PID" ] && kill "$TOOLBOX_PID" || true + [ -n "$PROXY_PID" ] && kill "$PROXY_PID" || true +} +trap cleanup EXIT + +# --- Execution --- +install_system_packages +start_cloud_sql_proxy + +export PGHOST=127.0.0.1 +export PGPORT=5432 +export PGPASSWORD="$DB_PASSWORD" +export GOOGLE_API_KEY="$GOOGLE_API_KEY" + +setup_toolbox +setup_db_table + +echo "Scanning $TARGET_ROOT for tests with pattern $AGENT_FILE_PATTERN..." + +find "$TARGET_ROOT" -name "$AGENT_FILE_PATTERN" | while read -r agent_file; do + sample_dir=$(dirname "$agent_file") + if [[ "$TARGET_LANG" == "python" ]]; then + run_python_test "$sample_dir" + elif [[ "$TARGET_LANG" == "js" ]]; then + run_js_test "$sample_dir" + elif [[ "$TARGET_LANG" == "go" ]]; then + run_go_test "$sample_dir" + fi +done diff --git a/.ci/quickstart_test/setup_hotels_sample.sql b/.ci/sample_tests/setup_hotels.sql similarity index 100% rename from .ci/quickstart_test/setup_hotels_sample.sql rename to .ci/sample_tests/setup_hotels.sql diff --git a/.github/workflows/deploy_dev_docs.yaml b/.github/workflows/deploy_dev_docs.yaml index d51207e1ad..d71f1db273 100644 --- a/.github/workflows/deploy_dev_docs.yaml +++ b/.github/workflows/deploy_dev_docs.yaml @@ -40,7 +40,7 @@ jobs: group: docs-deployment cancel-in-progress: false steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod @@ -56,7 +56,7 @@ jobs: node-version: "22" - name: Cache dependencies - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5 with: path: ~/.npm key: ${{ runner.os }}-node-${{ 
hashFiles('**/package-lock.json') }} diff --git a/.github/workflows/deploy_previous_version_docs.yaml b/.github/workflows/deploy_previous_version_docs.yaml index 5c238d18b4..1c642262e7 100644 --- a/.github/workflows/deploy_previous_version_docs.yaml +++ b/.github/workflows/deploy_previous_version_docs.yaml @@ -30,14 +30,14 @@ jobs: steps: - name: Checkout main branch (for latest templates and theme) - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: ref: 'main' submodules: 'recursive' fetch-depth: 0 - name: Checkout old content from tag into a temporary directory - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: ref: ${{ github.event.inputs.version_tag }} path: 'old_version_source' # Checkout into a temp subdir diff --git a/.github/workflows/deploy_versioned_docs.yaml b/.github/workflows/deploy_versioned_docs.yaml index 42d0bd1a20..67e809935e 100644 --- a/.github/workflows/deploy_versioned_docs.yaml +++ b/.github/workflows/deploy_versioned_docs.yaml @@ -30,7 +30,7 @@ jobs: cancel-in-progress: false steps: - name: Checkout Code at Tag - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: ref: ${{ github.event.release.tag_name }} diff --git a/.github/workflows/docs_preview_clean.yaml b/.github/workflows/docs_preview_clean.yaml index ba44bfcc8b..a3a1f07857 100644 --- a/.github/workflows/docs_preview_clean.yaml +++ b/.github/workflows/docs_preview_clean.yaml @@ -34,7 +34,7 @@ jobs: group: "preview-${{ github.event.number }}" cancel-in-progress: true steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: ref: versioned-gh-pages diff --git a/.github/workflows/docs_preview_deploy.yaml 
b/.github/workflows/docs_preview_deploy.yaml index 769b4c5dc5..fda0e4895f 100644 --- a/.github/workflows/docs_preview_deploy.yaml +++ b/.github/workflows/docs_preview_deploy.yaml @@ -49,7 +49,7 @@ jobs: group: "preview-${{ github.event.number }}" cancel-in-progress: true steps: - - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: # Checkout the PR's HEAD commit (supports forks). ref: ${{ github.event.pull_request.head.sha }} @@ -67,7 +67,7 @@ jobs: node-version: "22" - name: Cache dependencies - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} diff --git a/.github/workflows/link_checker_workflow.yaml b/.github/workflows/link_checker_workflow.yaml index 189016dbc4..591221d16e 100644 --- a/.github/workflows/link_checker_workflow.yaml +++ b/.github/workflows/link_checker_workflow.yaml @@ -22,39 +22,47 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repository - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Restore lychee cache - uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5 + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5 with: path: .lycheecache key: cache-lychee-${{ github.sha }} restore-keys: cache-lychee- - name: Link Checker + id: lychee-check uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2 + continue-on-error: true with: args: > - --verbose + --quiet --no-progress --cache --max-cache-age 1d --exclude '^neo4j\+.*' --exclude '^bolt://.*' README.md docs/ - output: /tmp/foo.txt - fail: true - jobSummary: true - debug: true + output: lychee-report.md + format: markdown + fail: true + jobSummary: false + debug: false env: 
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # This step only runs if the 'lychee_check' step fails, ensuring the - # context note only appears when the developer needs to troubleshoot. - - name: Display Link Context Note on Failure - if: ${{ failure() }} + + - name: Display Failure Report + # Run this ONLY if the link checker failed + if: steps.lychee-check.outcome == 'failure' run: | echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY + echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> $GITHUB_STEP_SUMMARY echo "---" >> $GITHUB_STEP_SUMMARY + echo "### Broken Links Found" >> $GITHUB_STEP_SUMMARY + cat ./lychee-report.md >> $GITHUB_STEP_SUMMARY + + exit 1 diff --git a/.github/workflows/publish-mcp.yml b/.github/workflows/publish-mcp.yml index dc84fbb759..32264b393c 100644 --- a/.github/workflows/publish-mcp.yml +++ b/.github/workflows/publish-mcp.yml @@ -29,7 +29,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Wait for image in Artifact Registry shell: bash diff --git a/.hugo/hugo.toml b/.hugo/hugo.toml index e3fb75803c..76e253dc0a 100644 --- a/.hugo/hugo.toml +++ b/.hugo/hugo.toml @@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick # Add a new version block here before every release # The order of versions in this file is mirrored into the dropdown +[[params.versions]] + version = "v0.27.0" + url = "https://googleapis.github.io/genai-toolbox/v0.27.0/" + [[params.versions]] version = "v0.26.0" url = 
"https://googleapis.github.io/genai-toolbox/v0.26.0/" diff --git a/.lycheeignore b/.lycheeignore index baec3c4449..236e2c8394 100644 --- a/.lycheeignore +++ b/.lycheeignore @@ -23,8 +23,7 @@ https://cloud.dgraph.io/login https://dgraph.io/docs # MySQL Community downloads and main site (often protected by bot mitigation) -https://dev.mysql.com/downloads/installer/ -https://www.mysql.com/ +^https?://(.*\.)?mysql\.com/.* # Claude desktop download link https://claude.ai/download @@ -37,9 +36,9 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html https://dev.mysql.com/doc/refman/8.4/en/user-names.html # npmjs links can occasionally trigger rate limiting during high-frequency CI builds -https://www.npmjs.com/package/@toolbox-sdk/core -https://www.npmjs.com/package/@toolbox-sdk/adk +^https?://(www\.)?npmjs\.com/.* + https://www.oceanbase.com/ # Ignore social media and blog profiles to reduce external request overhead -https://medium.com/@mcp_toolbox +https://medium.com/@mcp_toolbox \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d18812102..dc9077015d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## [0.27.0](https://github.com/googleapis/genai-toolbox/compare/v0.26.0...v0.27.0) (2026-02-12) + + +### ⚠ BREAKING CHANGES + +* Update configuration file v2 ([#2369](https://github.com/googleapis/genai-toolbox/issues/2369))([293c1d6](https://github.com/googleapis/genai-toolbox/commit/293c1d6889c39807855ba5e01d4c13ba2a4c50ce)) +* Update/add detailed telemetry for mcp endpoint compliant with OTEL semantic convention ([#1987](https://github.com/googleapis/genai-toolbox/issues/1987)) ([478a0bd](https://github.com/googleapis/genai-toolbox/commit/478a0bdb59288c1213f83862f95a698b4c2c0aab)) + +### Features + +* **cli/invoke:** Add support for direct tool invocation from CLI ([#2353](https://github.com/googleapis/genai-toolbox/issues/2353)) 
([6e49ba4](https://github.com/googleapis/genai-toolbox/commit/6e49ba436ef2390c13feaf902b29f5907acffb57)) +* **cli/skills:** Add support for generating agent skills from toolset ([#2392](https://github.com/googleapis/genai-toolbox/issues/2392)) ([80ef346](https://github.com/googleapis/genai-toolbox/commit/80ef34621453b77bdf6a6016c354f102a17ada04)) +* **cloud-logging-admin:** Add source, tools, integration test and docs ([#2137](https://github.com/googleapis/genai-toolbox/issues/2137)) ([252fc30](https://github.com/googleapis/genai-toolbox/commit/252fc3091af10d25d8d7af7e047b5ac87a5dd041)) +* **cockroachdb:** Add CockroachDB integration with cockroach-go ([#2006](https://github.com/googleapis/genai-toolbox/issues/2006)) ([1fdd99a](https://github.com/googleapis/genai-toolbox/commit/1fdd99a9b609a5e906acce414226ff44d75d5975)) +* **prebuiltconfigs/alloydb-omni:** Implement Alloydb omni dataplane tools ([#2340](https://github.com/googleapis/genai-toolbox/issues/2340)) ([e995349](https://github.com/googleapis/genai-toolbox/commit/e995349ea0756c700d188b8f04e9459121219f0c)) +* **server:** Add Tool call error categories ([#2387](https://github.com/googleapis/genai-toolbox/issues/2387)) ([32cb4db](https://github.com/googleapis/genai-toolbox/commit/32cb4db712d27579c1bf29e61cbd0bed02286c28)) +* **tools/looker:** support `looker-validate-project` tool ([#2430](https://github.com/googleapis/genai-toolbox/issues/2430)) ([a15a128](https://github.com/googleapis/genai-toolbox/commit/a15a12873f936b0102aeb9500cc3bcd71bb38c34)) + + + +### Bug Fixes + +* **dataplex:** Capture GCP HTTP errors in MCP Toolbox ([#2347](https://github.com/googleapis/genai-toolbox/issues/2347)) ([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15)) +* **sources/cockroachdb:** Update kind to type ([#2465](https://github.com/googleapis/genai-toolbox/issues/2465)) 
([2d341ac](https://github.com/googleapis/genai-toolbox/commit/2d341acaa61c3c1fe908fceee8afbd90fb646d3a)) +* Surface Dataplex API errors in MCP results ([#2347](https://github.com/googleapis/genai-toolbox/pull/2347))([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15)) + ## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 27b829148d..712f7aa71c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -92,11 +92,11 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s `newdb.go`. Create a `Config` struct to include all the necessary parameters for connecting to the database (e.g., host, port, username, password, database name) and a `Source` struct to store necessary parameters for tools (e.g., - Name, Kind, connection object, additional config). + Name, Type, connection object, additional config). * **Implement the [`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57) interface**. This interface requires two methods: - * `SourceConfigKind() string`: Returns a unique string identifier for your + * `SourceConfigType() string`: Returns a unique string identifier for your data source (e.g., `"newdb"`). * `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`: Creates a new instance of your data source and establishes a connection to @@ -104,7 +104,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s * **Implement the [`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63) interface**. This interface requires one method: - * `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`. 
+ * `SourceType() string`: Returns the same string identifier as `SourceConfigType()`. * **Implement `init()`** to register the new Source. * **Implement Unit Tests** in a file named `newdb_test.go`. @@ -126,7 +126,7 @@ tools. * **Implement the [`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61) interface**. This interface requires one method: - * `ToolConfigKind() string`: Returns a unique string identifier for your tool + * `ToolConfigType() string`: Returns a unique string identifier for your tool (e.g., `"newdb-tool"`). * `Initialize(sources map[string]Source) (Tool, error)`: Creates a new instance of your tool and validates that it can connect to the specified @@ -243,7 +243,7 @@ resources. | style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). | Pull requests should always add scope whenever possible. The scope is - formatted as `/` (e.g., `sources/postgres`, or + formatted as `/` (e.g., `sources/postgres`, or `tools/mssql-sql`). Ideally, **each PR covers only one scope**, if this is diff --git a/DEVELOPER.md b/DEVELOPER.md index ce9f827070..9836d6c9be 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -47,12 +47,13 @@ Before you begin, ensure you have the following: ### Tool Naming Conventions This section details the purpose and conventions for MCP Toolbox's tools naming -properties, **tool name** and **tool kind**. +properties, **tool name** and **tool type**. ``` -cancel_hotel: <- tool name - kind: postgres-sql <- tool kind - source: my_pg_source +kind: tools +name: cancel_hotel <- tool name +type: postgres-sql <- tool type +source: my_pg_source ``` #### Tool Name @@ -76,17 +77,17 @@ The following guidelines apply to tool names: to a function) until they can be validated through extensive testing to ensure they do not negatively impact agent's performances. 
-#### Tool Kind +#### Tool Type -Tool kind serves as a category or type that a user can assign to a tool. +Tool type serves as a category or type that a user can assign to a tool. -The following guidelines apply to tool kinds: +The following guidelines apply to tool types: -* Should user hyphens over underscores (e.g. `firestore-list-collections` or +* Should use hyphens over underscores (e.g. `firestore-list-collections` or `firestore_list_colelctions`). * Should use product name in name (e.g. `firestore-list-collections` over `list-collections`). -* Changes to tool kind are breaking changes and should be avoided. +* Changes to tool type are breaking changes and should be avoided. ## Testing diff --git a/README.md b/README.md index 86a186bb8a..160b9456e2 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ # MCP Toolbox for Databases +googleapis%2Fgenai-toolbox | Trendshift + [![Docs](https://img.shields.io/badge/docs-MCP_Toolbox-blue)](https://googleapis.github.io/genai-toolbox/) [![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?style=flat&logo=discord&logoColor=white)](https://discord.gg/Dmm69peqjh) [![Medium](https://img.shields.io/badge/Medium-12100E?style=flat&logo=medium&logoColor=white)](https://medium.com/@mcp_toolbox) @@ -105,7 +107,7 @@ redeploying your application. 
## Getting Started -### (Non-production) Running Toolbox +### Quickstart: Running Toolbox using NPX You can run Toolbox directly with a [configuration file](#configuration): @@ -140,7 +142,7 @@ To install Toolbox as a binary: > > ```sh > # see releases page for other versions -> export VERSION=0.26.0 +> export VERSION=0.27.0 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox > chmod +x toolbox > ``` @@ -153,7 +155,7 @@ To install Toolbox as a binary: > > ```sh > # see releases page for other versions -> export VERSION=0.26.0 +> export VERSION=0.27.0 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox > chmod +x toolbox > ``` @@ -166,7 +168,7 @@ To install Toolbox as a binary: > > ```sh > # see releases page for other versions -> export VERSION=0.26.0 +> export VERSION=0.27.0 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox > chmod +x toolbox > ``` @@ -179,7 +181,7 @@ To install Toolbox as a binary: > > ```cmd > :: see releases page for other versions -> set VERSION=0.26.0 +> set VERSION=0.27.0 > curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe" > ``` > @@ -191,7 +193,7 @@ To install Toolbox as a binary: > > ```powershell > # see releases page for other versions -> $VERSION = "0.26.0" +> $VERSION = "0.27.0" > curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" > ``` > @@ -204,7 +206,7 @@ You can also install Toolbox as a container: ```sh # see releases page for other versions -export VERSION=0.26.0 +export VERSION=0.27.0 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION ``` @@ -228,7 +230,7 @@ To install from source, ensure you have the latest version of [Go installed](https://go.dev/doc/install), and then run the following command: ```sh -go install github.com/googleapis/genai-toolbox@v0.26.0 
+go install github.com/googleapis/genai-toolbox@v0.27.0 ``` @@ -938,14 +940,14 @@ Toolbox should have access to. Most tools will have at least one source to execute against. ```yaml -sources: - my-pg-source: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: toolbox_db - user: toolbox_user - password: my-password +kind: sources +name: my-pg-source +type: postgres +host: 127.0.0.1 +port: 5432 +database: toolbox_db +user: toolbox_user +password: my-password ``` For more details on configuring different types of sources, see the @@ -954,19 +956,19 @@ For more details on configuring different types of sources, see the ### Tools The `tools` section of a `tools.yaml` define the actions an agent can take: what -kind of tool it is, which source(s) it affects, what parameters it uses, etc. +type of tool it is, which source(s) it affects, what parameters it uses, etc. ```yaml -tools: - search-hotels-by-name: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. - statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; +kind: tools +name: search-hotels-by-name +type: postgres-sql +source: my-pg-source +description: Search for hotels based on name. +parameters: + - name: name + type: string + description: The name of the hotel. +statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; ``` For more details on configuring different types of tools, see the diff --git a/cmd/internal/imports.go b/cmd/internal/imports.go new file mode 100644 index 0000000000..24c58d7b85 --- /dev/null +++ b/cmd/internal/imports.go @@ -0,0 +1,257 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + // Import prompt packages for side effect of registration + _ "github.com/googleapis/genai-toolbox/internal/prompts/custom" + + // Import tool packages for side effect of registration + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation" + _ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigtable" + _ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql" + _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases" + _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances" + _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck" + _ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblistschemas" + _ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/couchbase" + _ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal" + _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry" + _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes" + _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries" + _ "github.com/googleapis/genai-toolbox/internal/tools/dgraph" + _ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument" + _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules" + _ "github.com/googleapis/genai-toolbox/internal/tools/http" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile" + _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject" + _ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany" + _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone" + _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes" + _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher" + _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher" + _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema" + _ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats" + _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql" + _ "github.com/googleapis/genai-toolbox/internal/tools/redis" + _ 
"github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch" + _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch" + _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch" + _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch" + _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches" + _ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql" + _ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs" + _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables" + _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql" + _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql" + _ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql" + _ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql" + _ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait" + _ "github.com/googleapis/genai-toolbox/internal/tools/valkey" + _ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql" + + _ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin" + _ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg" + _ 
"github.com/googleapis/genai-toolbox/internal/sources/bigquery" + _ "github.com/googleapis/genai-toolbox/internal/sources/bigtable" + _ "github.com/googleapis/genai-toolbox/internal/sources/cassandra" + _ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql" + _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg" + _ "github.com/googleapis/genai-toolbox/internal/sources/cockroachdb" + _ "github.com/googleapis/genai-toolbox/internal/sources/couchbase" + _ "github.com/googleapis/genai-toolbox/internal/sources/dataplex" + _ "github.com/googleapis/genai-toolbox/internal/sources/dgraph" + _ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch" + _ "github.com/googleapis/genai-toolbox/internal/sources/firebird" + _ "github.com/googleapis/genai-toolbox/internal/sources/firestore" + _ "github.com/googleapis/genai-toolbox/internal/sources/http" + _ "github.com/googleapis/genai-toolbox/internal/sources/looker" + _ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb" + _ "github.com/googleapis/genai-toolbox/internal/sources/mongodb" + _ "github.com/googleapis/genai-toolbox/internal/sources/mssql" + _ "github.com/googleapis/genai-toolbox/internal/sources/mysql" + _ "github.com/googleapis/genai-toolbox/internal/sources/neo4j" + _ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase" + _ "github.com/googleapis/genai-toolbox/internal/sources/oracle" + _ 
"github.com/googleapis/genai-toolbox/internal/sources/postgres" + _ "github.com/googleapis/genai-toolbox/internal/sources/redis" + _ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark" + _ "github.com/googleapis/genai-toolbox/internal/sources/singlestore" + _ "github.com/googleapis/genai-toolbox/internal/sources/snowflake" + _ "github.com/googleapis/genai-toolbox/internal/sources/spanner" + _ "github.com/googleapis/genai-toolbox/internal/sources/sqlite" + _ "github.com/googleapis/genai-toolbox/internal/sources/tidb" + _ "github.com/googleapis/genai-toolbox/internal/sources/trino" + _ "github.com/googleapis/genai-toolbox/internal/sources/valkey" + _ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb" +) diff --git a/cmd/internal/invoke/command.go b/cmd/internal/invoke/command.go new file mode 100644 index 0000000000..81837402f1 --- /dev/null +++ b/cmd/internal/invoke/command.go @@ -0,0 +1,139 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package invoke + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/googleapis/genai-toolbox/cmd/internal" + "github.com/googleapis/genai-toolbox/internal/server" + "github.com/googleapis/genai-toolbox/internal/server/resources" + "github.com/googleapis/genai-toolbox/internal/util/parameters" + "github.com/spf13/cobra" +) + +func NewCommand(opts *internal.ToolboxOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "invoke [params]", + Short: "Execute a tool directly", + Long: `Execute a tool directly with parameters. +Params must be a JSON string. +Example: + toolbox invoke my-tool '{"param1": "value1"}'`, + Args: cobra.MinimumNArgs(1), + RunE: func(c *cobra.Command, args []string) error { + return runInvoke(c, args, opts) + }, + } + return cmd +} + +func runInvoke(cmd *cobra.Command, args []string, opts *internal.ToolboxOptions) error { + ctx, cancel := context.WithCancel(cmd.Context()) + defer cancel() + + ctx, shutdown, err := opts.Setup(ctx) + if err != nil { + return err + } + defer func() { + _ = shutdown(ctx) + }() + + _, err = opts.LoadConfig(ctx) + if err != nil { + return err + } + + // Initialize Resources + sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg) + if err != nil { + errMsg := fmt.Errorf("failed to initialize resources: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap) + + // Execute Tool + toolName := args[0] + tool, ok := resourceMgr.GetTool(toolName) + if !ok { + errMsg := fmt.Errorf("tool %q not found", toolName) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + var paramsInput string + if len(args) > 1 { + paramsInput = args[1] + } + + params := make(map[string]any) + if paramsInput != "" { + if err := 
json.Unmarshal([]byte(paramsInput), ¶ms); err != nil { + errMsg := fmt.Errorf("params must be a valid JSON string: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + } + + parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil) + if err != nil { + errMsg := fmt.Errorf("invalid parameters: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap()) + if err != nil { + errMsg := fmt.Errorf("error embedding parameters: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + // Client Auth not supported for ephemeral CLI call + requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr) + if err != nil { + errMsg := fmt.Errorf("failed to check auth requirements: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + if requiresAuth { + errMsg := fmt.Errorf("client authorization is not supported") + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "") + if err != nil { + errMsg := fmt.Errorf("tool execution failed: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + // Print Result + output, err := json.MarshalIndent(result, "", " ") + if err != nil { + errMsg := fmt.Errorf("failed to marshal result: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + fmt.Fprintln(opts.IOStreams.Out, string(output)) + + return nil +} diff --git a/cmd/internal/invoke/command_test.go b/cmd/internal/invoke/command_test.go new file mode 100644 index 0000000000..3eab850acf --- /dev/null +++ b/cmd/internal/invoke/command_test.go @@ -0,0 +1,153 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package invoke + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/googleapis/genai-toolbox/cmd/internal" + _ "github.com/googleapis/genai-toolbox/internal/sources/bigquery" + _ "github.com/googleapis/genai-toolbox/internal/sources/sqlite" + _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql" + _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql" + "github.com/spf13/cobra" +) + +func invokeCommand(args []string) (string, error) { + parentCmd := &cobra.Command{Use: "toolbox"} + + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + internal.PersistentFlags(parentCmd, opts) + + cmd := NewCommand(opts) + parentCmd.AddCommand(cmd) + parentCmd.SetArgs(args) + + err := parentCmd.Execute() + return buf.String(), err +} + +func TestInvokeTool(t *testing.T) { + // Create a temporary tools file + tmpDir := t.TempDir() + + toolsFileContent := ` +sources: + my-sqlite: + kind: sqlite + database: test.db +tools: + hello-sqlite: + kind: sqlite-sql + source: my-sqlite + description: "hello tool" + statement: "SELECT 'hello' as greeting" + echo-tool: + kind: sqlite-sql + source: my-sqlite + description: "echo tool" + statement: "SELECT ? 
as msg" + parameters: + - name: message + type: string + description: message to echo +` + + toolsFilePath := filepath.Join(tmpDir, "tools.yaml") + if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil { + t.Fatalf("failed to write tools file: %v", err) + } + + tcs := []struct { + desc string + args []string + want string + wantErr bool + errStr string + }{ + { + desc: "success - basic tool call", + args: []string{"invoke", "hello-sqlite", "--tools-file", toolsFilePath}, + want: `"greeting": "hello"`, + }, + { + desc: "success - tool call with parameters", + args: []string{"invoke", "echo-tool", `{"message": "world"}`, "--tools-file", toolsFilePath}, + want: `"msg": "world"`, + }, + { + desc: "error - tool not found", + args: []string{"invoke", "non-existent", "--tools-file", toolsFilePath}, + wantErr: true, + errStr: `tool "non-existent" not found`, + }, + { + desc: "error - invalid JSON params", + args: []string{"invoke", "echo-tool", `invalid-json`, "--tools-file", toolsFilePath}, + wantErr: true, + errStr: `params must be a valid JSON string`, + }, + } + + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + got, err := invokeCommand(tc.args) + if (err != nil) != tc.wantErr { + t.Fatalf("got error %v, wantErr %v", err, tc.wantErr) + } + if tc.wantErr && !strings.Contains(err.Error(), tc.errStr) { + t.Fatalf("got error %v, want error containing %q", err, tc.errStr) + } + if !tc.wantErr && !strings.Contains(got, tc.want) { + t.Fatalf("got %q, want it to contain %q", got, tc.want) + } + }) + } +} + +func TestInvokeTool_AuthUnsupported(t *testing.T) { + tmpDir := t.TempDir() + toolsFileContent := ` +sources: + my-bq: + kind: bigquery + project: my-project + useClientOAuth: true +tools: + bq-tool: + kind: bigquery-sql + source: my-bq + description: "bq tool" + statement: "SELECT 1" +` + toolsFilePath := filepath.Join(tmpDir, "auth_tools.yaml") + if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil 
{ + t.Fatalf("failed to write tools file: %v", err) + } + + args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath} + _, err := invokeCommand(args) + if err == nil { + t.Fatal("expected error for tool requiring client auth, but got nil") + } + if !strings.Contains(err.Error(), "client authorization is not supported") { + t.Fatalf("unexpected error message: %v", err) + } +} diff --git a/cmd/internal/options.go b/cmd/internal/options.go new file mode 100644 index 0000000000..ea07771493 --- /dev/null +++ b/cmd/internal/options.go @@ -0,0 +1,251 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "context" + "fmt" + "io" + "os" + "slices" + "strings" + + "github.com/googleapis/genai-toolbox/internal/log" + "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs" + "github.com/googleapis/genai-toolbox/internal/server" + "github.com/googleapis/genai-toolbox/internal/telemetry" + "github.com/googleapis/genai-toolbox/internal/util" +) + +type IOStreams struct { + In io.Reader + Out io.Writer + ErrOut io.Writer +} + +// ToolboxOptions holds dependencies shared by all commands. +type ToolboxOptions struct { + IOStreams IOStreams + Logger log.Logger + Cfg server.ServerConfig + ToolsFile string + ToolsFiles []string + ToolsFolder string + PrebuiltConfigs []string +} + +// Option defines a function that modifies the ToolboxOptions struct. 
+type Option func(*ToolboxOptions) + +// NewToolboxOptions creates a new instance with defaults, then applies any +// provided options. +func NewToolboxOptions(opts ...Option) *ToolboxOptions { + o := &ToolboxOptions{ + IOStreams: IOStreams{ + In: os.Stdin, + Out: os.Stdout, + ErrOut: os.Stderr, + }, + } + + for _, opt := range opts { + opt(o) + } + return o +} + +// Apply allows you to update an EXISTING ToolboxOptions instance. +// This is useful for "late binding". +func (o *ToolboxOptions) Apply(opts ...Option) { + for _, opt := range opts { + opt(o) + } +} + +// WithIOStreams updates the IO streams. +func WithIOStreams(out, err io.Writer) Option { + return func(o *ToolboxOptions) { + o.IOStreams.Out = out + o.IOStreams.ErrOut = err + } +} + +// Setup create logger and telemetry instrumentations. +func (opts *ToolboxOptions) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) { + // If stdio, set logger's out stream (usually DEBUG and INFO logs) to + // errStream + loggerOut := opts.IOStreams.Out + if opts.Cfg.Stdio { + loggerOut = opts.IOStreams.ErrOut + } + + // Handle logger separately from config + logger, err := log.NewLogger(opts.Cfg.LoggingFormat.String(), opts.Cfg.LogLevel.String(), loggerOut, opts.IOStreams.ErrOut) + if err != nil { + return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err) + } + + ctx = util.WithLogger(ctx, logger) + opts.Logger = logger + + // Set up OpenTelemetry + otelShutdown, err := telemetry.SetupOTel(ctx, opts.Cfg.Version, opts.Cfg.TelemetryOTLP, opts.Cfg.TelemetryGCP, opts.Cfg.TelemetryServiceName) + if err != nil { + errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err) + logger.ErrorContext(ctx, errMsg.Error()) + return ctx, nil, errMsg + } + + shutdownFunc := func(ctx context.Context) error { + err := otelShutdown(ctx) + if err != nil { + errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err) + logger.ErrorContext(ctx, errMsg.Error()) + return err + } + return 
nil + } + + instrumentation, err := telemetry.CreateTelemetryInstrumentation(opts.Cfg.Version) + if err != nil { + errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err) + logger.ErrorContext(ctx, errMsg.Error()) + return ctx, shutdownFunc, errMsg + } + + ctx = util.WithInstrumentation(ctx, instrumentation) + + return ctx, shutdownFunc, nil +} + +// LoadConfig checks and merge files that should be loaded into the server +func (opts *ToolboxOptions) LoadConfig(ctx context.Context) (bool, error) { + // Determine if Custom Files should be loaded + // Check for explicit custom flags + isCustomConfigured := opts.ToolsFile != "" || len(opts.ToolsFiles) > 0 || opts.ToolsFolder != "" + + // Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags) + useDefaultToolsFile := len(opts.PrebuiltConfigs) == 0 && !isCustomConfigured + + if useDefaultToolsFile { + opts.ToolsFile = "tools.yaml" + isCustomConfigured = true + } + + logger, err := util.LoggerFromContext(ctx) + if err != nil { + return isCustomConfigured, err + } + + var allToolsFiles []ToolsFile + + // Load Prebuilt Configuration + + if len(opts.PrebuiltConfigs) > 0 { + slices.Sort(opts.PrebuiltConfigs) + sourcesList := strings.Join(opts.PrebuiltConfigs, ", ") + logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList) + logger.InfoContext(ctx, logMsg) + + for _, configName := range opts.PrebuiltConfigs { + buf, err := prebuiltconfigs.Get(configName) + if err != nil { + logger.ErrorContext(ctx, err.Error()) + return isCustomConfigured, err + } + + // Parse into ToolsFile struct + parsed, err := parseToolsFile(ctx, buf) + if err != nil { + errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err) + logger.ErrorContext(ctx, errMsg.Error()) + return isCustomConfigured, errMsg + } + allToolsFiles = append(allToolsFiles, parsed) + } + } + + // Load Custom Configurations + if isCustomConfigured { + // Enforce 
exclusivity among custom flags (tools-file vs tools-files vs tools-folder) + if (opts.ToolsFile != "" && len(opts.ToolsFiles) > 0) || + (opts.ToolsFile != "" && opts.ToolsFolder != "") || + (len(opts.ToolsFiles) > 0 && opts.ToolsFolder != "") { + errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously") + logger.ErrorContext(ctx, errMsg.Error()) + return isCustomConfigured, errMsg + } + + var customTools ToolsFile + var err error + + if len(opts.ToolsFiles) > 0 { + // Use tools-files + logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(opts.ToolsFiles))) + customTools, err = LoadAndMergeToolsFiles(ctx, opts.ToolsFiles) + } else if opts.ToolsFolder != "" { + // Use tools-folder + logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", opts.ToolsFolder)) + customTools, err = LoadAndMergeToolsFolder(ctx, opts.ToolsFolder) + } else { + // Use single file (tools-file or default `tools.yaml`) + buf, readFileErr := os.ReadFile(opts.ToolsFile) + if readFileErr != nil { + errMsg := fmt.Errorf("unable to read tool file at %q: %w", opts.ToolsFile, readFileErr) + logger.ErrorContext(ctx, errMsg.Error()) + return isCustomConfigured, errMsg + } + customTools, err = parseToolsFile(ctx, buf) + if err != nil { + err = fmt.Errorf("unable to parse tool file at %q: %w", opts.ToolsFile, err) + } + } + + if err != nil { + logger.ErrorContext(ctx, err.Error()) + return isCustomConfigured, err + } + allToolsFiles = append(allToolsFiles, customTools) + } + + // Modify version string based on loaded configurations + if len(opts.PrebuiltConfigs) > 0 { + tag := "prebuilt" + if isCustomConfigured { + tag = "custom" + } + // prebuiltConfigs is already sorted above + for _, configName := range opts.PrebuiltConfigs { + opts.Cfg.Version += fmt.Sprintf("+%s.%s", tag, configName) + } + } + + // Merge Everything + // This will error if custom tools collide with prebuilt tools 
+ finalToolsFile, err := mergeToolsFiles(allToolsFiles...) + if err != nil { + logger.ErrorContext(ctx, err.Error()) + return isCustomConfigured, err + } + + opts.Cfg.SourceConfigs = finalToolsFile.Sources + opts.Cfg.AuthServiceConfigs = finalToolsFile.AuthServices + opts.Cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels + opts.Cfg.ToolConfigs = finalToolsFile.Tools + opts.Cfg.ToolsetConfigs = finalToolsFile.Toolsets + opts.Cfg.PromptConfigs = finalToolsFile.Prompts + + return isCustomConfigured, nil +} diff --git a/cmd/options_test.go b/cmd/internal/options_test.go similarity index 62% rename from cmd/options_test.go rename to cmd/internal/options_test.go index e0ab779b52..6e7c0a05ed 100644 --- a/cmd/options_test.go +++ b/cmd/internal/options_test.go @@ -12,57 +12,38 @@ // See the License for the specific language governing permissions and // limitations under the License. -package cmd +package internal import ( "errors" "io" "testing" - - "github.com/spf13/cobra" ) -func TestCommandOptions(t *testing.T) { +func TestToolboxOptions(t *testing.T) { w := io.Discard tcs := []struct { desc string - isValid func(*Command) error + isValid func(*ToolboxOptions) error option Option }{ { desc: "with logger", - isValid: func(c *Command) error { - if c.outStream != w || c.errStream != w { + isValid: func(o *ToolboxOptions) error { + if o.IOStreams.Out != w || o.IOStreams.ErrOut != w { return errors.New("loggers do not match") } return nil }, - option: WithStreams(w, w), + option: WithIOStreams(w, w), }, } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - got, err := invokeProxyWithOption(tc.option) - if err != nil { - t.Fatal(err) - } + got := NewToolboxOptions(tc.option) if err := tc.isValid(got); err != nil { t.Errorf("option did not initialize command correctly: %v", err) } }) } } - -func invokeProxyWithOption(o Option) (*Command, error) { - c := NewCommand(o) - // Keep the test output quiet - c.SilenceUsage = true - c.SilenceErrors = true - // 
Disable execute behavior - c.RunE = func(*cobra.Command, []string) error { - return nil - } - - err := c.Execute() - return c, err -} diff --git a/cmd/internal/persistent_flags.go b/cmd/internal/persistent_flags.go new file mode 100644 index 0000000000..3874521a15 --- /dev/null +++ b/cmd/internal/persistent_flags.go @@ -0,0 +1,46 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "fmt" + "strings" + + "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs" + "github.com/spf13/cobra" +) + +// PersistentFlags sets up flags that are available for all commands and +// subcommands +// It is also used to set up persistent flags during subcommand unit tests +func PersistentFlags(parentCmd *cobra.Command, opts *ToolboxOptions) { + persistentFlags := parentCmd.PersistentFlags() + + persistentFlags.StringVar(&opts.ToolsFile, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.") + persistentFlags.StringSliceVar(&opts.ToolsFiles, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.") + persistentFlags.StringVar(&opts.ToolsFolder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. 
Cannot be used with --tools-file, or --tools-files.") + persistentFlags.Var(&opts.Cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.") + persistentFlags.Var(&opts.Cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.") + persistentFlags.BoolVar(&opts.Cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.") + persistentFlags.StringVar(&opts.Cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')") + persistentFlags.StringVar(&opts.Cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.") + // Fetch prebuilt tools sources to customize the help description + prebuiltHelp := fmt.Sprintf( + "Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.", + strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"), + ) + persistentFlags.StringSliceVar(&opts.PrebuiltConfigs, "prebuilt", []string{}, prebuiltHelp) + persistentFlags.StringSliceVar(&opts.Cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.") +} diff --git a/cmd/internal/skills/command.go b/cmd/internal/skills/command.go new file mode 100644 index 0000000000..d06c42c2b7 --- /dev/null +++ b/cmd/internal/skills/command.go @@ -0,0 +1,214 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package skills + +import ( + "context" + _ "embed" + "fmt" + "os" + "path/filepath" + "sort" + + "github.com/googleapis/genai-toolbox/cmd/internal" + "github.com/googleapis/genai-toolbox/internal/server" + "github.com/googleapis/genai-toolbox/internal/server/resources" + "github.com/googleapis/genai-toolbox/internal/tools" + + "github.com/spf13/cobra" +) + +// skillsCmd is the command for generating skills. +type skillsCmd struct { + *cobra.Command + name string + description string + toolset string + outputDir string +} + +// NewCommand creates a new Command. +func NewCommand(opts *internal.ToolboxOptions) *cobra.Command { + cmd := &skillsCmd{} + cmd.Command = &cobra.Command{ + Use: "skills-generate", + Short: "Generate skills from tool configurations", + RunE: func(c *cobra.Command, args []string) error { + return run(cmd, opts) + }, + } + + cmd.Flags().StringVar(&cmd.name, "name", "", "Name of the generated skill.") + cmd.Flags().StringVar(&cmd.description, "description", "", "Description of the generated skill") + cmd.Flags().StringVar(&cmd.toolset, "toolset", "", "Name of the toolset to convert into a skill. 
If not provided, all tools will be included.") + cmd.Flags().StringVar(&cmd.outputDir, "output-dir", "skills", "Directory to output generated skills") + + _ = cmd.MarkFlagRequired("name") + _ = cmd.MarkFlagRequired("description") + return cmd.Command +} + +func run(cmd *skillsCmd, opts *internal.ToolboxOptions) error { + ctx, cancel := context.WithCancel(cmd.Context()) + defer cancel() + + ctx, shutdown, err := opts.Setup(ctx) + if err != nil { + return err + } + defer func() { + _ = shutdown(ctx) + }() + + _, err = opts.LoadConfig(ctx) + if err != nil { + return err + } + + if err := os.MkdirAll(cmd.outputDir, 0755); err != nil { + errMsg := fmt.Errorf("error creating output directory: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + opts.Logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", cmd.name)) + + // Initialize toolbox and collect tools + allTools, err := cmd.collectTools(ctx, opts) + if err != nil { + errMsg := fmt.Errorf("error collecting tools: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + if len(allTools) == 0 { + opts.Logger.InfoContext(ctx, "No tools found to generate.") + return nil + } + + // Generate the combined skill directory + skillPath := filepath.Join(cmd.outputDir, cmd.name) + if err := os.MkdirAll(skillPath, 0755); err != nil { + errMsg := fmt.Errorf("error creating skill directory: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + // Generate assets directory + assetsPath := filepath.Join(skillPath, "assets") + if err := os.MkdirAll(assetsPath, 0755); err != nil { + errMsg := fmt.Errorf("error creating assets dir: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + // Generate scripts directory + scriptsPath := filepath.Join(skillPath, "scripts") + if err := os.MkdirAll(scriptsPath, 0755); err != nil { + errMsg := fmt.Errorf("error creating scripts dir: %w", err) + opts.Logger.ErrorContext(ctx, 
errMsg.Error()) + return errMsg + } + + // Iterate over keys to ensure deterministic order + var toolNames []string + for name := range allTools { + toolNames = append(toolNames, name) + } + sort.Strings(toolNames) + + for _, toolName := range toolNames { + // Generate YAML config in asset directory + minimizedContent, err := generateToolConfigYAML(opts.Cfg, toolName) + if err != nil { + errMsg := fmt.Errorf("error generating filtered config for %s: %w", toolName, err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + specificToolsFileName := fmt.Sprintf("%s.yaml", toolName) + if minimizedContent != nil { + destPath := filepath.Join(assetsPath, specificToolsFileName) + if err := os.WriteFile(destPath, minimizedContent, 0644); err != nil { + errMsg := fmt.Errorf("error writing filtered config for %s: %w", toolName, err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + } + + // Generate wrapper script in scripts directory + scriptContent, err := generateScriptContent(toolName, specificToolsFileName) + if err != nil { + errMsg := fmt.Errorf("error generating script content for %s: %w", toolName, err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + scriptFilename := filepath.Join(scriptsPath, fmt.Sprintf("%s.js", toolName)) + if err := os.WriteFile(scriptFilename, []byte(scriptContent), 0755); err != nil { + errMsg := fmt.Errorf("error writing script %s: %w", scriptFilename, err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + } + + // Generate SKILL.md + skillContent, err := generateSkillMarkdown(cmd.name, cmd.description, allTools) + if err != nil { + errMsg := fmt.Errorf("error generating SKILL.md content: %w", err) + opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + skillMdPath := filepath.Join(skillPath, "SKILL.md") + if err := os.WriteFile(skillMdPath, []byte(skillContent), 0644); err != nil { + errMsg := fmt.Errorf("error writing SKILL.md: %w", err) + 
opts.Logger.ErrorContext(ctx, errMsg.Error()) + return errMsg + } + + opts.Logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", cmd.name, len(allTools))) + + return nil +} + +func (c *skillsCmd) collectTools(ctx context.Context, opts *internal.ToolboxOptions) (map[string]tools.Tool, error) { + // Initialize Resources + sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg) + if err != nil { + return nil, fmt.Errorf("failed to initialize resources: %w", err) + } + + resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap) + + result := make(map[string]tools.Tool) + + if c.toolset == "" { + return toolsMap, nil + } + + ts, ok := resourceMgr.GetToolset(c.toolset) + if !ok { + return nil, fmt.Errorf("toolset %q not found", c.toolset) + } + + for _, t := range ts.Tools { + if t != nil { + tool := *t + result[tool.McpManifest().Name] = tool + } + } + + return result, nil +} diff --git a/cmd/internal/skills/command_test.go b/cmd/internal/skills/command_test.go new file mode 100644 index 0000000000..e7ddeafd1c --- /dev/null +++ b/cmd/internal/skills/command_test.go @@ -0,0 +1,195 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package skills + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/googleapis/genai-toolbox/cmd/internal" + _ "github.com/googleapis/genai-toolbox/internal/sources/sqlite" + _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql" + "github.com/spf13/cobra" +) + +func invokeCommand(args []string) (string, error) { + parentCmd := &cobra.Command{Use: "toolbox"} + + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + internal.PersistentFlags(parentCmd, opts) + + cmd := NewCommand(opts) + parentCmd.AddCommand(cmd) + parentCmd.SetArgs(args) + + err := parentCmd.Execute() + return buf.String(), err +} + +func TestGenerateSkill(t *testing.T) { + // Create a temporary directory for tests + tmpDir := t.TempDir() + outputDir := filepath.Join(tmpDir, "skills") + + // Create a tools.yaml file with a sqlite tool + toolsFileContent := ` +sources: + my-sqlite: + kind: sqlite + database: test.db +tools: + hello-sqlite: + kind: sqlite-sql + source: my-sqlite + description: "hello tool" + statement: "SELECT 'hello' as greeting" +` + + toolsFilePath := filepath.Join(tmpDir, "tools.yaml") + if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil { + t.Fatalf("failed to write tools file: %v", err) + } + + args := []string{ + "skills-generate", + "--tools-file", toolsFilePath, + "--output-dir", outputDir, + "--name", "hello-sqlite", + "--description", "hello tool", + } + + got, err := invokeCommand(args) + if err != nil { + t.Fatalf("command failed: %v\nOutput: %s", err, got) + } + + // Verify generated directory structure + skillPath := filepath.Join(outputDir, "hello-sqlite") + if _, err := os.Stat(skillPath); os.IsNotExist(err) { + t.Fatalf("skill directory not created: %s", skillPath) + } + + // Check SKILL.md + skillMarkdown := filepath.Join(skillPath, "SKILL.md") + content, err := os.ReadFile(skillMarkdown) + if err != nil { + t.Fatalf("failed to read 
SKILL.md: %v", err) + } + + expectedFrontmatter := `--- +name: hello-sqlite +description: hello tool +---` + if !strings.HasPrefix(string(content), expectedFrontmatter) { + t.Errorf("SKILL.md does not have expected frontmatter format.\nExpected prefix:\n%s\nGot:\n%s", expectedFrontmatter, string(content)) + } + + if !strings.Contains(string(content), "## Usage") { + t.Errorf("SKILL.md does not contain '## Usage' section") + } + + if !strings.Contains(string(content), "## Scripts") { + t.Errorf("SKILL.md does not contain '## Scripts' section") + } + + if !strings.Contains(string(content), "### hello-sqlite") { + t.Errorf("SKILL.md does not contain '### hello-sqlite' tool header") + } + + // Check script file + scriptFilename := "hello-sqlite.js" + scriptPath := filepath.Join(skillPath, "scripts", scriptFilename) + if _, err := os.Stat(scriptPath); os.IsNotExist(err) { + t.Fatalf("script file not created: %s", scriptPath) + } + + scriptContent, err := os.ReadFile(scriptPath) + if err != nil { + t.Fatalf("failed to read script file: %v", err) + } + if !strings.Contains(string(scriptContent), "hello-sqlite") { + t.Errorf("script file does not contain expected tool name") + } + + // Check assets + assetPath := filepath.Join(skillPath, "assets", "hello-sqlite.yaml") + if _, err := os.Stat(assetPath); os.IsNotExist(err) { + t.Fatalf("asset file not created: %s", assetPath) + } + assetContent, err := os.ReadFile(assetPath) + if err != nil { + t.Fatalf("failed to read asset file: %v", err) + } + if !strings.Contains(string(assetContent), "hello-sqlite") { + t.Errorf("asset file does not contain expected tool name") + } +} + +func TestGenerateSkill_NoConfig(t *testing.T) { + tmpDir := t.TempDir() + outputDir := filepath.Join(tmpDir, "skills") + + args := []string{ + "skills-generate", + "--output-dir", outputDir, + "--name", "test", + "--description", "test", + } + + _, err := invokeCommand(args) + if err == nil { + t.Fatal("expected command to fail when no configuration is 
provided and tools.yaml is missing") + } + + // Should not have created the directory if no config was processed + if _, err := os.Stat(outputDir); !os.IsNotExist(err) { + t.Errorf("output directory should not have been created") + } +} + +func TestGenerateSkill_MissingArguments(t *testing.T) { + tmpDir := t.TempDir() + toolsFilePath := filepath.Join(tmpDir, "tools.yaml") + if err := os.WriteFile(toolsFilePath, []byte("tools: {}"), 0644); err != nil { + t.Fatalf("failed to write tools file: %v", err) + } + + tests := []struct { + name string + args []string + }{ + { + name: "missing name", + args: []string{"skills-generate", "--tools-file", toolsFilePath, "--description", "test"}, + }, + { + name: "missing description", + args: []string{"skills-generate", "--tools-file", toolsFilePath, "--name", "test"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := invokeCommand(tt.args) + if err == nil { + t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got) + } + }) + } +} diff --git a/cmd/internal/skills/generator.go b/cmd/internal/skills/generator.go new file mode 100644 index 0000000000..a9e20fc9e3 --- /dev/null +++ b/cmd/internal/skills/generator.go @@ -0,0 +1,296 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package skills + +import ( + "bytes" + "encoding/json" + "fmt" + "sort" + "strings" + "text/template" + + "github.com/goccy/go-yaml" + "github.com/googleapis/genai-toolbox/internal/server" + "github.com/googleapis/genai-toolbox/internal/sources" + "github.com/googleapis/genai-toolbox/internal/tools" + "github.com/googleapis/genai-toolbox/internal/util/parameters" +) + +const skillTemplate = `--- +name: {{.SkillName}} +description: {{.SkillDescription}} +--- + +## Usage + +All scripts can be executed using Node.js. Replace ` + "`" + `` + "`" + ` and ` + "`" + `` + "`" + ` with actual values. + +**Bash:** +` + "`" + `node scripts/.js '{"": ""}'` + "`" + ` + +**PowerShell:** +` + "`" + `node scripts/.js '{\"\": \"\"}'` + "`" + ` + +## Scripts + +{{range .Tools}} +### {{.Name}} + +{{.Description}} + +{{.ParametersSchema}} + +--- +{{end}} +` + +type toolTemplateData struct { + Name string + Description string + ParametersSchema string +} + +type skillTemplateData struct { + SkillName string + SkillDescription string + Tools []toolTemplateData +} + +// generateSkillMarkdown generates the content of the SKILL.md file. +// It includes usage instructions and a reference section for each tool in the skill, +// detailing its description and parameters. 
func generateSkillMarkdown(skillName, skillDescription string, toolsMap map[string]tools.Tool) (string, error) {
	var toolsData []toolTemplateData

	// Order tools based on name so the generated document is deterministic
	// regardless of map iteration order.
	var toolNames []string
	for name := range toolsMap {
		toolNames = append(toolNames, name)
	}
	sort.Strings(toolNames)

	for _, name := range toolNames {
		tool := toolsMap[name]
		manifest := tool.Manifest()

		// Render the tool's parameters as a fenced JSON-schema block
		// (empty string when the tool takes no parameters).
		parametersSchema, err := formatParameters(manifest.Parameters)
		if err != nil {
			return "", err
		}

		toolsData = append(toolsData, toolTemplateData{
			Name:             name,
			Description:      manifest.Description,
			ParametersSchema: parametersSchema,
		})
	}

	data := skillTemplateData{
		SkillName:        skillName,
		SkillDescription: skillDescription,
		Tools:            toolsData,
	}

	tmpl, err := template.New("markdown").Parse(skillTemplate)
	if err != nil {
		return "", fmt.Errorf("error parsing markdown template: %w", err)
	}

	var buf strings.Builder
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", fmt.Errorf("error executing markdown template: %w", err)
	}

	return buf.String(), nil
}

// nodeScriptTemplate is the Node.js wrapper emitted per tool. It resolves the
// toolbox binary (PATH lookup first, then a repository-relative fallback) and
// delegates to `toolbox invoke <tool>`, passing any CLI arguments through.
// {{.Name}} and {{.ToolsFileName}} are substituted by text/template.
// NOTE(review): whitespace inside this literal was reconstructed from a
// flattened source — confirm against the original file before relying on
// byte-exact output.
const nodeScriptTemplate = `#!/usr/bin/env node

const { spawn, execSync } = require('child_process');
const path = require('path');
const fs = require('fs');

const toolName = "{{.Name}}";
const toolsFileName = "{{.ToolsFileName}}";

function getToolboxPath() {
    try {
        const checkCommand = process.platform === 'win32' ? 'where toolbox' : 'which toolbox';
        const globalPath = execSync(checkCommand, { stdio: 'pipe', encoding: 'utf-8' }).trim();
        if (globalPath) {
            return globalPath.split('\n')[0].trim();
        }
    } catch (e) {
        // Ignore error;
    }
    const localPath = path.resolve(__dirname, '../../../toolbox');
    if (fs.existsSync(localPath)) {
        return localPath;
    }
    throw new Error("Toolbox binary not found");
}

let toolboxBinary;
try {
    toolboxBinary = getToolboxPath();
} catch (err) {
    console.error("Error:", err.message);
    process.exit(1);
}

let configArgs = [];
if (toolsFileName) {
    configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));
}

const args = process.argv.slice(2);
const toolboxArgs = [...configArgs, "invoke", toolName, ...args];

const child = spawn(toolboxBinary, toolboxArgs, { stdio: 'inherit' });

child.on('close', (code) => {
    process.exit(code);
});

child.on('error', (err) => {
    console.error("Error executing toolbox:", err);
    process.exit(1);
});
`

// scriptData carries the substitution values for nodeScriptTemplate.
type scriptData struct {
	Name          string // tool name passed to `toolbox invoke`
	ToolsFileName string // optional assets-relative tools file; empty disables --tools-file
}

// generateScriptContent creates the content for a Node.js wrapper script.
// This script invokes the toolbox CLI with the appropriate configuration
// (using a generated tools file) and arguments to execute the specific tool.
func generateScriptContent(name string, toolsFileName string) (string, error) {
	data := scriptData{
		Name:          name,
		ToolsFileName: toolsFileName,
	}

	tmpl, err := template.New("script").Parse(nodeScriptTemplate)
	if err != nil {
		return "", fmt.Errorf("error parsing script template: %w", err)
	}

	var buf strings.Builder
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", fmt.Errorf("error executing script template: %w", err)
	}

	return buf.String(), nil
}

// formatParameters converts a list of parameter manifests into a formatted JSON schema string.
+// This schema is used in the skill documentation to describe the input parameters for a tool. +func formatParameters(params []parameters.ParameterManifest) (string, error) { + if len(params) == 0 { + return "", nil + } + + properties := make(map[string]interface{}) + var required []string + + for _, p := range params { + paramMap := map[string]interface{}{ + "type": p.Type, + "description": p.Description, + } + if p.Default != nil { + paramMap["default"] = p.Default + } + properties[p.Name] = paramMap + if p.Required { + required = append(required, p.Name) + } + } + + schema := map[string]interface{}{ + "type": "object", + "properties": properties, + } + if len(required) > 0 { + schema["required"] = required + } + + schemaJSON, err := json.MarshalIndent(schema, "", " ") + if err != nil { + return "", fmt.Errorf("error generating parameters schema: %w", err) + } + + return fmt.Sprintf("#### Parameters\n\n```json\n%s\n```", string(schemaJSON)), nil +} + +// generateToolConfigYAML generates the YAML configuration for a single tool and its dependency (source). +// It extracts the relevant tool and source configurations from the server config and formats them +// into a YAML document suitable for inclusion in the skill's assets. 
+func generateToolConfigYAML(cfg server.ServerConfig, toolName string) ([]byte, error) { + toolCfg, ok := cfg.ToolConfigs[toolName] + if !ok { + return nil, fmt.Errorf("error finding tool config: %s", toolName) + } + + var buf bytes.Buffer + encoder := yaml.NewEncoder(&buf) + + // Process Tool Config + toolWrapper := struct { + Kind string `yaml:"kind"` + Config tools.ToolConfig `yaml:",inline"` + }{ + Kind: "tools", + Config: toolCfg, + } + + if err := encoder.Encode(toolWrapper); err != nil { + return nil, fmt.Errorf("error encoding tool config: %w", err) + } + + // Process Source Config + var toolMap map[string]interface{} + b, err := yaml.Marshal(toolCfg) + if err != nil { + return nil, fmt.Errorf("error marshaling tool config: %w", err) + } + if err := yaml.Unmarshal(b, &toolMap); err != nil { + return nil, fmt.Errorf("error unmarshaling tool config map: %w", err) + } + + if sourceName, ok := toolMap["source"].(string); ok && sourceName != "" { + sourceCfg, ok := cfg.SourceConfigs[sourceName] + if !ok { + return nil, fmt.Errorf("error finding source config: %s", sourceName) + } + + sourceWrapper := struct { + Kind string `yaml:"kind"` + Config sources.SourceConfig `yaml:",inline"` + }{ + Kind: "sources", + Config: sourceCfg, + } + + if err := encoder.Encode(sourceWrapper); err != nil { + return nil, fmt.Errorf("error encoding source config: %w", err) + } + } + + return buf.Bytes(), nil +} diff --git a/cmd/internal/skills/generator_test.go b/cmd/internal/skills/generator_test.go new file mode 100644 index 0000000000..bd3a462180 --- /dev/null +++ b/cmd/internal/skills/generator_test.go @@ -0,0 +1,347 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// MockToolConfig is a minimal tools.ToolConfig implementation used to drive
// generateToolConfigYAML without a real tool backend.
type MockToolConfig struct {
	Name       string                `yaml:"name"`
	Type       string                `yaml:"type"`
	Source     string                `yaml:"source"`
	Other      string                `yaml:"other"`
	Parameters parameters.Parameters `yaml:"parameters"`
}

func (m MockToolConfig) ToolConfigType() string {
	return m.Type
}

// Initialize satisfies tools.ToolConfig; the tests never invoke the tool.
func (m MockToolConfig) Initialize(map[string]sources.Source) (tools.Tool, error) {
	return nil, nil
}

// MockSourceConfig is a minimal sources.SourceConfig implementation used as
// the dependency of MockToolConfig in the tests below.
type MockSourceConfig struct {
	Name             string `yaml:"name"`
	Type             string `yaml:"type"`
	ConnectionString string `yaml:"connection_string"`
}

func (m MockSourceConfig) SourceConfigType() string {
	return m.Type
}

// Initialize satisfies sources.SourceConfig; the tests never open the source.
func (m MockSourceConfig) Initialize(context.Context, trace.Tracer) (sources.Source, error) {
	return nil, nil
}

// TestFormatParameters checks the rendered JSON-schema block: empty input
// yields an empty string, and required/default fields appear in the output.
func TestFormatParameters(t *testing.T) {
	tests := []struct {
		name         string
		params       []parameters.ParameterManifest
		wantContains []string
		wantErr      bool
	}{
		{
			name:         "empty parameters",
			params:       []parameters.ParameterManifest{},
			wantContains: []string{""},
		},
		{
			name: "single required string parameter",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "A test parameter",
					Type:        "string",
					Required:    true,
				},
			},
			wantContains: []string{
				"## Parameters",
				"```json",
				`"type": "object"`,
				`"properties": {`,
				`"param1": {`,
				`"type": "string"`,
				`"description": "A test parameter"`,
				`"required": [`,
				`"param1"`,
			},
		},
		{
			name: "mixed parameters with defaults",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "Param 1",
					Type:        "string",
					Required:    true,
				},
				{
					Name:        "param2",
					Description: "Param 2",
					Type:        "integer",
					Default:     42,
					Required:    false,
				},
			},
			wantContains: []string{
				`"param1": {`,
				`"param2": {`,
				`"default": 42`,
				`"required": [`,
				`"param1"`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := formatParameters(tt.params)
			if (err != nil) != tt.wantErr {
				t.Errorf("formatParameters() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			// Empty parameter list must render to the empty string.
			if len(tt.params) == 0 {
				if got != "" {
					t.Errorf("formatParameters() = %v, want empty string", got)
				}
				return
			}

			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("formatParameters() result missing expected string: %s\nGot:\n%s", want, got)
				}
			}
		})
	}
}

// TestGenerateSkillMarkdown checks the frontmatter, usage section, and
// per-tool sections of the generated SKILL.md document.
func TestGenerateSkillMarkdown(t *testing.T) {
	toolsMap := map[string]tools.Tool{
		"tool1": server.MockTool{
			Description: "First tool",
			Params: []parameters.Parameter{
				parameters.NewStringParameter("p1", "d1"),
			},
		},
	}

	got, err := generateSkillMarkdown("MySkill", "My Description", toolsMap)
	if err != nil {
		t.Fatalf("generateSkillMarkdown() error = %v", err)
	}

	expectedSubstrings := []string{
		"name: MySkill",
		"description: My Description",
		"## Usage",
		"All scripts can be executed using Node.js",
		"**Bash:**",
		"`node scripts/.js '{\"\": \"\"}'`",
		"**PowerShell:**",
		"`node scripts/.js '{\"\": \"\"}'`",
		"## Scripts",
		"### tool1",
		"First tool",
		"## Parameters",
	}

	for _, s := range expectedSubstrings {
		if !strings.Contains(got, s) {
			t.Errorf("generateSkillMarkdown() missing substring %q", s)
		}
	}
}

// TestGenerateScriptContent checks that the Node.js wrapper embeds the tool
// name and, when given, the tools file wiring.
func TestGenerateScriptContent(t *testing.T) {
	tests := []struct {
		name          string
		toolName      string
		toolsFileName string
		wantContains  []string
	}{
		{
			name:          "basic script",
			toolName:      "test-tool",
			toolsFileName: "",
			wantContains: []string{
				`const toolName = "test-tool";`,
				`const toolsFileName = "";`,
				`const toolboxArgs = [...configArgs, "invoke", toolName, ...args];`,
			},
		},
		{
			name:          "script with tools file",
			toolName:      "complex-tool",
			toolsFileName: "tools.yaml",
			wantContains: []string{
				`const toolName = "complex-tool";`,
				`const toolsFileName = "tools.yaml";`,
				`configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := generateScriptContent(tt.toolName, tt.toolsFileName)
			if err != nil {
				t.Fatalf("generateScriptContent() error = %v", err)
			}

			for _, s := range tt.wantContains {
				if !strings.Contains(got, s) {
					t.Errorf("generateScriptContent() missing substring %q\nGot:\n%s", s, got)
				}
			}
		})
	}
}

// TestGenerateToolConfigYAML checks the two-document YAML output (tool plus
// optional source), and the error paths for missing tool or source configs.
func TestGenerateToolConfigYAML(t *testing.T) {
	cfg := server.ServerConfig{
		ToolConfigs: server.ToolConfigs{
			"tool1": MockToolConfig{
				Name:   "tool1",
				Type:   "custom-tool",
				Source: "src1",
				Other:  "foo",
			},
			"toolNoSource": MockToolConfig{
				Name: "toolNoSource",
				Type: "http",
			},
			"toolWithParams": MockToolConfig{
				Name: "toolWithParams",
				Type: "custom-tool",
				Parameters: []parameters.Parameter{
					parameters.NewStringParameter("param1", "desc1"),
				},
			},
			"toolWithMissingSource": MockToolConfig{
				Name:   "toolWithMissingSource",
				Type:   "custom-tool",
				Source: "missing-src",
			},
		},
		SourceConfigs: server.SourceConfigs{
			"src1": MockSourceConfig{
				Name:             "src1",
				Type:             "postgres",
				ConnectionString: "conn1",
			},
		},
	}

	tests := []struct {
		name         string
		toolName     string
		wantContains []string
		wantErr      bool
		wantNil      bool
	}{
		{
			name:     "tool with source",
			toolName: "tool1",
			wantContains: []string{
				"kind: tools",
				"name: tool1",
				"type: custom-tool",
				"source: src1",
				"other: foo",
				"---",
				"kind: sources",
				"name: src1",
				"type: postgres",
				"connection_string: conn1",
			},
		},
		{
			name:     "tool without source",
			toolName: "toolNoSource",
			wantContains: []string{
				"kind: tools",
				"name: toolNoSource",
				"type: http",
			},
		},
		{
			name:     "tool with parameters",
			toolName: "toolWithParams",
			wantContains: []string{
				"kind: tools",
				"name: toolWithParams",
				"type: custom-tool",
				"parameters:",
				"- name: param1",
				"type: string",
				"description: desc1",
			},
		},
		{
			name:     "non-existent tool",
			toolName: "missing-tool",
			wantErr:  true,
		},
		{
			name:     "tool with missing source config",
			toolName: "toolWithMissingSource",
			wantErr:  true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotBytes, err := generateToolConfigYAML(cfg, tt.toolName)
			if (err != nil) != tt.wantErr {
				t.Errorf("generateToolConfigYAML() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			if tt.wantNil {
				if gotBytes != nil {
					t.Errorf("generateToolConfigYAML() expected nil, got %s", string(gotBytes))
				}
				return
			}

			got := string(gotBytes)
			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("generateToolConfigYAML() result missing expected string: %q\nGot:\n%s", want, got)
				}
			}
		})
	}
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "bytes" + "context" + "fmt" + "io" + "os" + "path/filepath" + "regexp" + "slices" + "strings" + + "github.com/goccy/go-yaml" + "github.com/googleapis/genai-toolbox/internal/server" +) + +type ToolsFile struct { + Sources server.SourceConfigs `yaml:"sources"` + AuthServices server.AuthServiceConfigs `yaml:"authServices"` + EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"` + Tools server.ToolConfigs `yaml:"tools"` + Toolsets server.ToolsetConfigs `yaml:"toolsets"` + Prompts server.PromptConfigs `yaml:"prompts"` +} + +// parseEnv replaces environment variables ${ENV_NAME} with their values. +// also support ${ENV_NAME:default_value}. +func parseEnv(input string) (string, error) { + re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`) + + var err error + output := re.ReplaceAllStringFunc(input, func(match string) string { + parts := re.FindStringSubmatch(match) + + // extract the variable name + variableName := parts[1] + if value, found := os.LookupEnv(variableName); found { + return value + } + if len(parts) >= 4 && parts[2] != "" { + return parts[3] + } + err = fmt.Errorf("environment variable not found: %q", variableName) + return "" + }) + return output, err +} + +// parseToolsFile parses the provided yaml into appropriate configs. 
// parseToolsFile parses the provided yaml into appropriate configs.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
	var toolsFile ToolsFile
	// Replace environment variables if found
	output, err := parseEnv(string(raw))
	if err != nil {
		return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
	}
	raw = []byte(output)

	// Normalize any v1-style documents to the v2 per-resource form before
	// handing the stream to the resource unmarshaler.
	raw, err = convertToolsFile(raw)
	if err != nil {
		return toolsFile, fmt.Errorf("error converting tools file: %s", err)
	}

	// Parse contents
	toolsFile.Sources, toolsFile.AuthServices, toolsFile.EmbeddingModels, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
	if err != nil {
		return toolsFile, err
	}
	return toolsFile, nil
}

// convertToolsFile rewrites every v1-format YAML document in raw (top-level
// "sources:"/"tools:"/... maps keyed by resource name) into v2 format (one
// document per resource with explicit "kind" and "name" fields). Documents
// that are already v2 are re-encoded unchanged. Detection is per top-level
// key of each document: a v1 key whose value is a mapping is transformed;
// anything else causes the whole document to be treated as v2.
func convertToolsFile(raw []byte) ([]byte, error) {
	var input yaml.MapSlice
	// UseOrderedMap preserves key order so the converted output keeps the
	// author's field ordering.
	decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())

	// convert to tools file v2
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)

	v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
	for {
		if err := decoder.Decode(&input); err != nil {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		for _, item := range input {
			key, ok := item.Key.(string)
			if !ok {
				return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
			}
			// check if the key is config file v1's key
			if slices.Contains(v1keys, key) {
				// check if value conversion to yaml.MapSlice successfully
				// fields such as "tools" in toolsets might pass the first check but
				// fail to convert to MapSlice
				if slice, ok := item.Value.(yaml.MapSlice); ok {
					// Deprecated: convert authSources to authServices
					if key == "authSources" {
						key = "authServices"
					}
					transformed, err := transformDocs(key, slice)
					if err != nil {
						return nil, err
					}
					// encode per-doc
					for _, doc := range transformed {
						if err := encoder.Encode(doc); err != nil {
							return nil, err
						}
					}
				} else {
					// invalid input will be ignored
					// we don't want to throw error here since the config could
					// be valid but with a different order such as:
					// ---
					// tools:
					// - tool_a
					// kind: toolsets
					// ---
					continue
				}
			} else {
				// this doc is already v2, encode to buf
				// (the break skips the remaining keys: the whole document was
				// just emitted in one piece)
				if err := encoder.Encode(input); err != nil {
					return nil, err
				}
				break
			}
		}
	}
	return buf.Bytes(), nil
}

// transformDocs transforms the configuration file from v1 format to v2
// yaml.MapSlice will preserve the order in a map
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
	var transformed []yaml.MapSlice
	for _, entry := range input {
		entryName, ok := entry.Key.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
		}
		// Toolset entries are bare lists in v1; ProcessValue wraps them
		// under a "tools" key (second argument).
		entryBody := ProcessValue(entry.Value, kind == "toolsets")

		// Every v2 document starts with its kind and name.
		currentTransformed := yaml.MapSlice{
			{Key: "kind", Value: kind},
			{Key: "name", Value: entryName},
		}

		// Merge the transformed body into our result
		if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
			currentTransformed = append(currentTransformed, bodySlice...)
		} else {
			return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
		}
		transformed = append(transformed, currentTransformed)
	}
	return transformed, nil
}

// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
func ProcessValue(v any, isToolset bool) any {
	switch val := v.(type) {
	case yaml.MapSlice:
		// creating a new MapSlice is safer for recursive transformation
		newVal := make(yaml.MapSlice, len(val))
		for i, item := range val {
			// Perform renaming
			if item.Key == "kind" {
				item.Key = "type"
			}
			// Recursive call for nested values (e.g., nested objects or lists)
			item.Value = ProcessValue(item.Value, false)
			newVal[i] = item
		}
		return newVal
	case []any:
		// Process lists: If it's a toolset top-level list, wrap it.
		if isToolset {
			return yaml.MapSlice{{Key: "tools", Value: val}}
		}
		// Otherwise, recurse into list items (to catch nested objects)
		newVal := make([]any, len(val))
		for i := range val {
			newVal[i] = ProcessValue(val[i], false)
		}
		return newVal
	default:
		return val
	}
}
+ if isToolset { + return yaml.MapSlice{{Key: "tools", Value: val}} + } + // Otherwise, recurse into list items (to catch nested objects) + newVal := make([]any, len(val)) + for i := range val { + newVal[i] = ProcessValue(val[i], false) + } + return newVal + default: + return val + } +} + +// mergeToolsFiles merges multiple ToolsFile structs into one. +// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets. +// All resource names (sources, authServices, tools, toolsets) must be unique across all files. +func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) { + merged := ToolsFile{ + Sources: make(server.SourceConfigs), + AuthServices: make(server.AuthServiceConfigs), + EmbeddingModels: make(server.EmbeddingModelConfigs), + Tools: make(server.ToolConfigs), + Toolsets: make(server.ToolsetConfigs), + Prompts: make(server.PromptConfigs), + } + + var conflicts []string + + for fileIndex, file := range files { + // Check for conflicts and merge sources + for name, source := range file.Sources { + if _, exists := merged.Sources[name]; exists { + conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.Sources[name] = source + } + } + + // Check for conflicts and merge authServices + for name, authService := range file.AuthServices { + if _, exists := merged.AuthServices[name]; exists { + conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.AuthServices[name] = authService + } + } + + // Check for conflicts and merge embeddingModels + for name, em := range file.EmbeddingModels { + if _, exists := merged.EmbeddingModels[name]; exists { + conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.EmbeddingModels[name] = em + } + } + + // Check for conflicts and merge tools + for name, tool := range file.Tools { + if _, exists := merged.Tools[name]; exists { + 
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.Tools[name] = tool + } + } + + // Check for conflicts and merge toolsets + for name, toolset := range file.Toolsets { + if _, exists := merged.Toolsets[name]; exists { + conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.Toolsets[name] = toolset + } + } + + // Check for conflicts and merge prompts + for name, prompt := range file.Prompts { + if _, exists := merged.Prompts[name]; exists { + conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1)) + } else { + merged.Prompts[name] = prompt + } + } + } + + // If conflicts were detected, return an error + if len(conflicts) > 0 { + return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - ")) + } + + return merged, nil +} + +// LoadAndMergeToolsFiles loads multiple YAML files and merges them +func LoadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) { + var toolsFiles []ToolsFile + + for _, filePath := range filePaths { + buf, err := os.ReadFile(filePath) + if err != nil { + return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err) + } + + toolsFile, err := parseToolsFile(ctx, buf) + if err != nil { + return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err) + } + + toolsFiles = append(toolsFiles, toolsFile) + } + + mergedFile, err := mergeToolsFiles(toolsFiles...) 
+ if err != nil { + return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err) + } + + return mergedFile, nil +} + +// LoadAndMergeToolsFolder loads all YAML files from a directory and merges them +func LoadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) { + // Check if directory exists + info, err := os.Stat(folderPath) + if err != nil { + return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err) + } + if !info.IsDir() { + return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath) + } + + // Find all YAML files in the directory + pattern := filepath.Join(folderPath, "*.yaml") + yamlFiles, err := filepath.Glob(pattern) + if err != nil { + return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err) + } + + // Also find .yml files + ymlPattern := filepath.Join(folderPath, "*.yml") + ymlFiles, err := filepath.Glob(ymlPattern) + if err != nil { + return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err) + } + + // Combine both file lists + allFiles := append(yamlFiles, ymlFiles...) + + if len(allFiles) == 0 { + return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath) + } + + // Use existing LoadAndMergeToolsFiles function + return LoadAndMergeToolsFiles(ctx, allFiles) +} diff --git a/cmd/internal/tools_file_test.go b/cmd/internal/tools_file_test.go new file mode 100644 index 0000000000..3b26baa621 --- /dev/null +++ b/cmd/internal/tools_file_test.go @@ -0,0 +1,2141 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// TestParseEnv verifies ${VAR} and ${VAR:default} substitution: set
// variables win over defaults, empty defaults are honored, and an unset
// variable without a default yields a descriptive error.
func TestParseEnv(t *testing.T) {
	tcs := []struct {
		desc      string
		env       map[string]string // variables to set for this case
		in        string
		want      string
		err       bool   // whether an error is expected
		errString string // exact expected error text when err is true
	}{
		{
			desc:      "without default without env",
			in:        "${FOO}",
			want:      "",
			err:       true,
			errString: `environment variable not found: "FOO"`,
		},
		{
			desc: "without default with env",
			env: map[string]string{
				"FOO": "bar",
			},
			in:   "${FOO}",
			want: "bar",
		},
		{
			desc: "with empty default",
			in:   "${FOO:}",
			want: "",
		},
		{
			desc: "with default",
			in:   "${FOO:bar}",
			want: "bar",
		},
		{
			desc: "with default with env",
			env: map[string]string{
				"FOO": "hello",
			},
			in:   "${FOO:bar}",
			want: "hello",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			// t.Setenv scopes each variable to this subtest.
			if tc.env != nil {
				for k, v := range tc.env {
					t.Setenv(k, v)
				}
			}
			got, err := parseEnv(tc.in)
			if tc.err {
				if err == nil {
					t.Fatalf("expected error not found")
				}
				if tc.errString != err.Error() {
					t.Fatalf("incorrect error string: got %s, want %s", err, tc.errString)
				}
			}
			if tc.want != got {
				t.Fatalf("unexpected want: got %s, want %s", got, tc.want)
			}
		})
	}
}
country + type: string + description: some description +--- +kind: toolsets +name: example_toolset +tools: +- example_tool +--- +kind: prompts +name: code_review +description: ask llm to analyze code quality +messages: +- content: "please review the following code for quality: {{.code}}" +arguments: +- name: code + description: the code to review +--- +kind: embeddingModels +name: gemini-model +type: gemini +model: gemini-embedding-001 +apiKey: some-key +dimension: 768 +`, + }, + { + desc: "preserve resource order", + in: ` + tools: + example_tool: + kind: postgres-sql + source: my-pg-instance + description: some description + statement: SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description + sources: + my-pg-instance: + kind: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass + authServices: + my-google-auth: + kind: google + clientId: testing-id + toolsets: + example_toolset: + - example_tool + authSources: + my-google-auth2: + kind: google + clientId: testing-id`, + want: `kind: tools +name: example_tool +type: postgres-sql +source: my-pg-instance +description: some description +statement: SELECT * FROM SQL_STATEMENT; +parameters: +- name: country + type: string + description: some description +--- +kind: sources +name: my-pg-instance +type: cloud-sql-postgres +project: my-project +region: my-region +instance: my-instance +database: my_db +user: my_user +password: my_pass +--- +kind: authServices +name: my-google-auth +type: google +clientId: testing-id +--- +kind: toolsets +name: example_toolset +tools: +- example_tool +--- +kind: authServices +name: my-google-auth2 +type: google +clientId: testing-id +`, + }, + { + desc: "convert combination of v1 and v2", + in: ` + sources: + my-pg-instance: + kind: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + 
password: my_pass + authServices: + my-google-auth: + kind: google + clientId: testing-id + tools: + example_tool: + kind: postgres-sql + source: my-pg-instance + description: some description + statement: SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description + toolsets: + example_toolset: + - example_tool + prompts: + code_review: + description: ask llm to analyze code quality + messages: + - content: "please review the following code for quality: {{.code}}" + arguments: + - name: code + description: the code to review + embeddingModels: + gemini-model: + kind: gemini + model: gemini-embedding-001 + apiKey: some-key + dimension: 768 +--- + kind: sources + name: my-pg-instance2 + type: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance +--- + kind: authServices + name: my-google-auth2 + type: google + clientId: testing-id +--- + kind: tools + name: example_tool2 + type: postgres-sql + source: my-pg-instance + description: some description + statement: SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description +--- + kind: toolsets + name: example_toolset2 + tools: + - example_tool +--- + tools: + - example_tool + kind: toolsets + name: example_toolset3 +--- + kind: prompts + name: code_review2 + description: ask llm to analyze code quality + messages: + - content: "please review the following code for quality: {{.code}}" + arguments: + - name: code + description: the code to review +--- + kind: embeddingModels + name: gemini-model2 + type: gemini`, + want: `kind: sources +name: my-pg-instance +type: cloud-sql-postgres +project: my-project +region: my-region +instance: my-instance +database: my_db +user: my_user +password: my_pass +--- +kind: authServices +name: my-google-auth +type: google +clientId: testing-id +--- +kind: tools +name: example_tool +type: postgres-sql +source: my-pg-instance +description: some description 
+statement: SELECT * FROM SQL_STATEMENT; +parameters: +- name: country + type: string + description: some description +--- +kind: toolsets +name: example_toolset +tools: +- example_tool +--- +kind: prompts +name: code_review +description: ask llm to analyze code quality +messages: +- content: "please review the following code for quality: {{.code}}" +arguments: +- name: code + description: the code to review +--- +kind: embeddingModels +name: gemini-model +type: gemini +model: gemini-embedding-001 +apiKey: some-key +dimension: 768 +--- +kind: sources +name: my-pg-instance2 +type: cloud-sql-postgres +project: my-project +region: my-region +instance: my-instance +--- +kind: authServices +name: my-google-auth2 +type: google +clientId: testing-id +--- +kind: tools +name: example_tool2 +type: postgres-sql +source: my-pg-instance +description: some description +statement: SELECT * FROM SQL_STATEMENT; +parameters: +- name: country + type: string + description: some description +--- +kind: toolsets +name: example_toolset2 +tools: +- example_tool +--- +tools: +- example_tool +kind: toolsets +name: example_toolset3 +--- +kind: prompts +name: code_review2 +description: ask llm to analyze code quality +messages: +- content: "please review the following code for quality: {{.code}}" +arguments: +- name: code + description: the code to review +--- +kind: embeddingModels +name: gemini-model2 +type: gemini +`, + }, + { + desc: "no convertion needed", + in: `kind: sources +name: my-pg-instance +type: cloud-sql-postgres +project: my-project +region: my-region +instance: my-instance +database: my_db +user: my_user +password: my_pass +--- +kind: tools +name: example_tool +type: postgres-sql +source: my-pg-instance +description: some description +statement: SELECT * FROM SQL_STATEMENT; +parameters: +- name: country + type: string + description: some description +--- +kind: toolsets +name: example_toolset +tools: +- example_tool`, + want: `kind: sources +name: my-pg-instance +type: 
cloud-sql-postgres +project: my-project +region: my-region +instance: my-instance +database: my_db +user: my_user +password: my_pass +--- +kind: tools +name: example_tool +type: postgres-sql +source: my-pg-instance +description: some description +statement: SELECT * FROM SQL_STATEMENT; +parameters: +- name: country + type: string + description: some description +--- +kind: toolsets +name: example_toolset +tools: +- example_tool +`, + }, + { + desc: "invalid source", + in: `sources: invalid`, + want: "", + }, + { + desc: "invalid toolset", + in: `toolsets: invalid`, + want: "", + }, + } + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + output, err := convertToolsFile([]byte(tc.in)) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + + if diff := cmp.Diff(string(output), tc.want); diff != "" { + t.Fatalf("incorrect toolsets parse: diff %v", diff) + } + }) + } +} + +func TestParseToolFile(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + description string + in string + wantToolsFile ToolsFile + }{ + { + description: "basic example tools file v1", + in: ` + sources: + my-pg-instance: + kind: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass + tools: + example_tool: + kind: postgres-sql + source: my-pg-instance + description: some description + statement: | + SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description + toolsets: + example_toolset: + - example_tool + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-pg-instance": cloudsqlpgsrc.Config{ + Name: "my-pg-instance", + Type: cloudsqlpgsrc.SourceType, + Project: "my-project", + Region: "my-region", + Instance: "my-instance", + IPType: "public", + Database: "my_db", + User: "my_user", + Password: "my_pass", + }, + }, + Tools: 
server.ToolConfigs{ + "example_tool": postgressql.Config{ + Name: "example_tool", + Type: "postgres-sql", + Source: "my-pg-instance", + Description: "some description", + Statement: "SELECT * FROM SQL_STATEMENT;\n", + Parameters: []parameters.Parameter{ + parameters.NewStringParameter("country", "some description"), + }, + AuthRequired: []string{}, + }, + }, + Toolsets: server.ToolsetConfigs{ + "example_toolset": tools.ToolsetConfig{ + Name: "example_toolset", + ToolNames: []string{"example_tool"}, + }, + }, + AuthServices: nil, + Prompts: nil, + }, + }, + { + description: "basic example tools file v2", + in: ` + kind: sources + name: my-pg-instance + type: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass +--- + kind: authServices + name: my-google-auth + type: google + clientId: testing-id +--- + kind: embeddingModels + name: gemini-model + type: gemini + model: gemini-embedding-001 + apiKey: some-key + dimension: 768 +--- + kind: tools + name: example_tool + type: postgres-sql + source: my-pg-instance + description: some description + statement: | + SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description +--- + kind: toolsets + name: example_toolset + tools: + - example_tool +--- + kind: prompts + name: code_review + description: ask llm to analyze code quality + messages: + - content: "please review the following code for quality: {{.code}}" + arguments: + - name: code + description: the code to review + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-pg-instance": cloudsqlpgsrc.Config{ + Name: "my-pg-instance", + Type: cloudsqlpgsrc.SourceType, + Project: "my-project", + Region: "my-region", + Instance: "my-instance", + IPType: "public", + Database: "my_db", + User: "my_user", + Password: "my_pass", + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-auth": google.Config{ + Name: 
"my-google-auth", + Type: google.AuthServiceType, + ClientID: "testing-id", + }, + }, + EmbeddingModels: server.EmbeddingModelConfigs{ + "gemini-model": gemini.Config{ + Name: "gemini-model", + Type: gemini.EmbeddingModelType, + Model: "gemini-embedding-001", + ApiKey: "some-key", + Dimension: 768, + }, + }, + Tools: server.ToolConfigs{ + "example_tool": postgressql.Config{ + Name: "example_tool", + Type: "postgres-sql", + Source: "my-pg-instance", + Description: "some description", + Statement: "SELECT * FROM SQL_STATEMENT;\n", + Parameters: []parameters.Parameter{ + parameters.NewStringParameter("country", "some description"), + }, + AuthRequired: []string{}, + }, + }, + Toolsets: server.ToolsetConfigs{ + "example_toolset": tools.ToolsetConfig{ + Name: "example_toolset", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: server.PromptConfigs{ + "code_review": &custom.Config{ + Name: "code_review", + Description: "ask llm to analyze code quality", + Arguments: prompts.Arguments{ + {Parameter: parameters.NewStringParameter("code", "the code to review")}, + }, + Messages: []prompts.Message{ + {Role: "user", Content: "please review the following code for quality: {{.code}}"}, + }, + }, + }, + }, + }, + { + description: "only prompts", + in: ` + kind: prompts + name: my-prompt + description: A prompt template for data analysis. + arguments: + - name: country + description: The country to analyze. + messages: + - content: Analyze the data for {{.country}}. 
+ `, + wantToolsFile: ToolsFile{ + Sources: nil, + AuthServices: nil, + Tools: nil, + Toolsets: nil, + Prompts: server.PromptConfigs{ + "my-prompt": &custom.Config{ + Name: "my-prompt", + Description: "A prompt template for data analysis.", + Arguments: prompts.Arguments{ + {Parameter: parameters.NewStringParameter("country", "The country to analyze.")}, + }, + Messages: []prompts.Message{ + {Role: "user", Content: "Analyze the data for {{.country}}."}, + }, + }, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.description, func(t *testing.T) { + toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("failed to parse input: %v", err) + } + if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { + t.Fatalf("incorrect sources parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { + t.Fatalf("incorrect authServices parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { + t.Fatalf("incorrect tools parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" { + t.Fatalf("incorrect toolsets parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" { + t.Fatalf("incorrect prompts parse: diff %v", diff) + } + }) + } +} + +func TestParseToolFileWithAuth(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + description string + in string + wantToolsFile ToolsFile + }{ + { + description: "basic example", + in: ` + kind: sources + name: my-pg-instance + type: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass +--- + kind: authServices + name: my-google-service + type: google + clientId: my-client-id +--- + kind: authServices 
+ name: other-google-service + type: google + clientId: other-client-id +--- + kind: tools + name: example_tool + type: postgres-sql + source: my-pg-instance + description: some description + statement: | + SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description + - name: id + type: integer + description: user id + authServices: + - name: my-google-service + field: user_id + - name: email + type: string + description: user email + authServices: + - name: my-google-service + field: email + - name: other-google-service + field: other_email +--- + kind: toolsets + name: example_toolset + tools: + - example_tool + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-pg-instance": cloudsqlpgsrc.Config{ + Name: "my-pg-instance", + Type: cloudsqlpgsrc.SourceType, + Project: "my-project", + Region: "my-region", + Instance: "my-instance", + IPType: "public", + Database: "my_db", + User: "my_user", + Password: "my_pass", + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-service": google.Config{ + Name: "my-google-service", + Type: google.AuthServiceType, + ClientID: "my-client-id", + }, + "other-google-service": google.Config{ + Name: "other-google-service", + Type: google.AuthServiceType, + ClientID: "other-client-id", + }, + }, + Tools: server.ToolConfigs{ + "example_tool": postgressql.Config{ + Name: "example_tool", + Type: "postgres-sql", + Source: "my-pg-instance", + Description: "some description", + Statement: "SELECT * FROM SQL_STATEMENT;\n", + AuthRequired: []string{}, + Parameters: []parameters.Parameter{ + parameters.NewStringParameter("country", "some description"), + parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), + parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: 
"other_email"}}), + }, + }, + }, + Toolsets: server.ToolsetConfigs{ + "example_toolset": tools.ToolsetConfig{ + Name: "example_toolset", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: nil, + }, + }, + { + description: "basic example with authSources", + in: ` + sources: + my-pg-instance: + kind: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass + authSources: + my-google-service: + kind: google + clientId: my-client-id + other-google-service: + kind: google + clientId: other-client-id + + tools: + example_tool: + kind: postgres-sql + source: my-pg-instance + description: some description + statement: | + SELECT * FROM SQL_STATEMENT; + parameters: + - name: country + type: string + description: some description + - name: id + type: integer + description: user id + authSources: + - name: my-google-service + field: user_id + - name: email + type: string + description: user email + authSources: + - name: my-google-service + field: email + - name: other-google-service + field: other_email + + toolsets: + example_toolset: + - example_tool + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-pg-instance": cloudsqlpgsrc.Config{ + Name: "my-pg-instance", + Type: cloudsqlpgsrc.SourceType, + Project: "my-project", + Region: "my-region", + Instance: "my-instance", + IPType: "public", + Database: "my_db", + User: "my_user", + Password: "my_pass", + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-service": google.Config{ + Name: "my-google-service", + Type: google.AuthServiceType, + ClientID: "my-client-id", + }, + "other-google-service": google.Config{ + Name: "other-google-service", + Type: google.AuthServiceType, + ClientID: "other-client-id", + }, + }, + Tools: server.ToolConfigs{ + "example_tool": postgressql.Config{ + Name: "example_tool", + Type: "postgres-sql", + Source: "my-pg-instance", + Description: "some description", + Statement: 
"SELECT * FROM SQL_STATEMENT;\n", + AuthRequired: []string{}, + Parameters: []parameters.Parameter{ + parameters.NewStringParameter("country", "some description"), + parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), + parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}), + }, + }, + }, + Toolsets: server.ToolsetConfigs{ + "example_toolset": tools.ToolsetConfig{ + Name: "example_toolset", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: nil, + }, + }, + { + description: "basic example with authRequired", + in: ` + kind: sources + name: my-pg-instance + type: cloud-sql-postgres + project: my-project + region: my-region + instance: my-instance + database: my_db + user: my_user + password: my_pass +--- + kind: authServices + name: my-google-service + type: google + clientId: my-client-id +--- + kind: authServices + name: other-google-service + type: google + clientId: other-client-id +--- + kind: tools + name: example_tool + type: postgres-sql + source: my-pg-instance + description: some description + statement: | + SELECT * FROM SQL_STATEMENT; + authRequired: + - my-google-service + parameters: + - name: country + type: string + description: some description + - name: id + type: integer + description: user id + authServices: + - name: my-google-service + field: user_id + - name: email + type: string + description: user email + authServices: + - name: my-google-service + field: email + - name: other-google-service + field: other_email +--- + kind: toolsets + name: example_toolset + tools: + - example_tool + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-pg-instance": cloudsqlpgsrc.Config{ + Name: "my-pg-instance", + Type: cloudsqlpgsrc.SourceType, + Project: "my-project", + Region: "my-region", + Instance: "my-instance", 
+ IPType: "public", + Database: "my_db", + User: "my_user", + Password: "my_pass", + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-service": google.Config{ + Name: "my-google-service", + Type: google.AuthServiceType, + ClientID: "my-client-id", + }, + "other-google-service": google.Config{ + Name: "other-google-service", + Type: google.AuthServiceType, + ClientID: "other-client-id", + }, + }, + Tools: server.ToolConfigs{ + "example_tool": postgressql.Config{ + Name: "example_tool", + Type: "postgres-sql", + Source: "my-pg-instance", + Description: "some description", + Statement: "SELECT * FROM SQL_STATEMENT;\n", + AuthRequired: []string{"my-google-service"}, + Parameters: []parameters.Parameter{ + parameters.NewStringParameter("country", "some description"), + parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), + parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}), + }, + }, + }, + Toolsets: server.ToolsetConfigs{ + "example_toolset": tools.ToolsetConfig{ + Name: "example_toolset", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: nil, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.description, func(t *testing.T) { + toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("failed to parse input: %v", err) + } + if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { + t.Fatalf("incorrect sources parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { + t.Fatalf("incorrect authServices parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { + t.Fatalf("incorrect tools parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Toolsets, 
toolsFile.Toolsets); diff != "" { + t.Fatalf("incorrect toolsets parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" { + t.Fatalf("incorrect prompts parse: diff %v", diff) + } + }) + } + +} + +func TestEnvVarReplacement(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + t.Setenv("TestHeader", "ACTUAL_HEADER") + t.Setenv("API_KEY", "ACTUAL_API_KEY") + t.Setenv("clientId", "ACTUAL_CLIENT_ID") + t.Setenv("clientId2", "ACTUAL_CLIENT_ID_2") + t.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME") + t.Setenv("cat_string", "cat") + t.Setenv("food_string", "food") + t.Setenv("TestHeader", "ACTUAL_HEADER") + t.Setenv("prompt_name", "ACTUAL_PROMPT_NAME") + t.Setenv("prompt_content", "ACTUAL_CONTENT") + + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + description string + in string + wantToolsFile ToolsFile + }{ + { + description: "file with env var example", + in: ` + sources: + my-http-instance: + kind: http + baseUrl: http://test_server/ + timeout: 10s + headers: + Authorization: ${TestHeader} + queryParams: + api-key: ${API_KEY} + authServices: + my-google-service: + kind: google + clientId: ${clientId} + other-google-service: + kind: google + clientId: ${clientId2} + + tools: + example_tool: + kind: http + source: my-instance + method: GET + path: "search?name=alice&pet=${cat_string}" + description: some description + authRequired: + - my-google-auth-service + - other-auth-service + queryParams: + - name: country + type: string + description: some description + authServices: + - name: my-google-auth-service + field: user_id + - name: other-auth-service + field: user_id + requestBody: | + { + "age": {{.age}}, + "city": "{{.city}}", + "food": "${food_string}", + "other": "$OTHER" + } + bodyParams: + - name: age + type: integer + description: age num + - name: city + type: string + description: city string + headers: + Authorization: API_KEY + Content-Type: application/json + 
headerParams: + - name: Language + type: string + description: language string + + toolsets: + ${toolset_name}: + - example_tool + + + prompts: + ${prompt_name}: + description: A test prompt for {{.name}}. + messages: + - role: user + content: ${prompt_content} + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-http-instance": httpsrc.Config{ + Name: "my-http-instance", + Type: httpsrc.SourceType, + BaseURL: "http://test_server/", + Timeout: "10s", + DefaultHeaders: map[string]string{"Authorization": "ACTUAL_HEADER"}, + QueryParams: map[string]string{"api-key": "ACTUAL_API_KEY"}, + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-service": google.Config{ + Name: "my-google-service", + Type: google.AuthServiceType, + ClientID: "ACTUAL_CLIENT_ID", + }, + "other-google-service": google.Config{ + Name: "other-google-service", + Type: google.AuthServiceType, + ClientID: "ACTUAL_CLIENT_ID_2", + }, + }, + Tools: server.ToolConfigs{ + "example_tool": http.Config{ + Name: "example_tool", + Type: "http", + Source: "my-instance", + Method: "GET", + Path: "search?name=alice&pet=cat", + Description: "some description", + AuthRequired: []string{"my-google-auth-service", "other-auth-service"}, + QueryParams: []parameters.Parameter{ + parameters.NewStringParameterWithAuth("country", "some description", + []parameters.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"}, + {Name: "other-auth-service", Field: "user_id"}}), + }, + RequestBody: `{ + "age": {{.age}}, + "city": "{{.city}}", + "food": "food", + "other": "$OTHER" +} +`, + BodyParams: []parameters.Parameter{parameters.NewIntParameter("age", "age num"), parameters.NewStringParameter("city", "city string")}, + Headers: map[string]string{"Authorization": "API_KEY", "Content-Type": "application/json"}, + HeaderParams: []parameters.Parameter{parameters.NewStringParameter("Language", "language string")}, + }, + }, + Toolsets: server.ToolsetConfigs{ + "ACTUAL_TOOLSET_NAME": 
tools.ToolsetConfig{ + Name: "ACTUAL_TOOLSET_NAME", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: server.PromptConfigs{ + "ACTUAL_PROMPT_NAME": &custom.Config{ + Name: "ACTUAL_PROMPT_NAME", + Description: "A test prompt for {{.name}}.", + Messages: []prompts.Message{ + { + Role: "user", + Content: "ACTUAL_CONTENT", + }, + }, + Arguments: nil, + }, + }, + }, + }, + { + description: "file with env var example toolsfile v2", + in: ` + kind: sources + name: my-http-instance + type: http + baseUrl: http://test_server/ + timeout: 10s + headers: + Authorization: ${TestHeader} + queryParams: + api-key: ${API_KEY} +--- + kind: authServices + name: my-google-service + type: google + clientId: ${clientId} +--- + kind: authServices + name: other-google-service + type: google + clientId: ${clientId2} +--- + kind: tools + name: example_tool + type: http + source: my-instance + method: GET + path: "search?name=alice&pet=${cat_string}" + description: some description + authRequired: + - my-google-auth-service + - other-auth-service + queryParams: + - name: country + type: string + description: some description + authServices: + - name: my-google-auth-service + field: user_id + - name: other-auth-service + field: user_id + requestBody: | + { + "age": {{.age}}, + "city": "{{.city}}", + "food": "${food_string}", + "other": "$OTHER" + } + bodyParams: + - name: age + type: integer + description: age num + - name: city + type: string + description: city string + headers: + Authorization: API_KEY + Content-Type: application/json + headerParams: + - name: Language + type: string + description: language string +--- + kind: toolsets + name: ${toolset_name} + tools: + - example_tool +--- + kind: prompts + name: ${prompt_name} + description: A test prompt for {{.name}}. 
+ messages: + - role: user + content: ${prompt_content} + `, + wantToolsFile: ToolsFile{ + Sources: server.SourceConfigs{ + "my-http-instance": httpsrc.Config{ + Name: "my-http-instance", + Type: httpsrc.SourceType, + BaseURL: "http://test_server/", + Timeout: "10s", + DefaultHeaders: map[string]string{"Authorization": "ACTUAL_HEADER"}, + QueryParams: map[string]string{"api-key": "ACTUAL_API_KEY"}, + }, + }, + AuthServices: server.AuthServiceConfigs{ + "my-google-service": google.Config{ + Name: "my-google-service", + Type: google.AuthServiceType, + ClientID: "ACTUAL_CLIENT_ID", + }, + "other-google-service": google.Config{ + Name: "other-google-service", + Type: google.AuthServiceType, + ClientID: "ACTUAL_CLIENT_ID_2", + }, + }, + Tools: server.ToolConfigs{ + "example_tool": http.Config{ + Name: "example_tool", + Type: "http", + Source: "my-instance", + Method: "GET", + Path: "search?name=alice&pet=cat", + Description: "some description", + AuthRequired: []string{"my-google-auth-service", "other-auth-service"}, + QueryParams: []parameters.Parameter{ + parameters.NewStringParameterWithAuth("country", "some description", + []parameters.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"}, + {Name: "other-auth-service", Field: "user_id"}}), + }, + RequestBody: `{ + "age": {{.age}}, + "city": "{{.city}}", + "food": "food", + "other": "$OTHER" +} +`, + BodyParams: []parameters.Parameter{parameters.NewIntParameter("age", "age num"), parameters.NewStringParameter("city", "city string")}, + Headers: map[string]string{"Authorization": "API_KEY", "Content-Type": "application/json"}, + HeaderParams: []parameters.Parameter{parameters.NewStringParameter("Language", "language string")}, + }, + }, + Toolsets: server.ToolsetConfigs{ + "ACTUAL_TOOLSET_NAME": tools.ToolsetConfig{ + Name: "ACTUAL_TOOLSET_NAME", + ToolNames: []string{"example_tool"}, + }, + }, + Prompts: server.PromptConfigs{ + "ACTUAL_PROMPT_NAME": &custom.Config{ + Name: "ACTUAL_PROMPT_NAME", + 
Description: "A test prompt for {{.name}}.", + Messages: []prompts.Message{ + { + Role: "user", + Content: "ACTUAL_CONTENT", + }, + }, + Arguments: nil, + }, + }, + }, + }, + } + for _, tc := range tcs { + t.Run(tc.description, func(t *testing.T) { + toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) + if err != nil { + t.Fatalf("failed to parse input: %v", err) + } + if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { + t.Fatalf("incorrect sources parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { + t.Fatalf("incorrect authServices parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { + t.Fatalf("incorrect tools parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" { + t.Fatalf("incorrect toolsets parse: diff %v", diff) + } + if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" { + t.Fatalf("incorrect prompts parse: diff %v", diff) + } + }) + } +} + +func TestPrebuiltTools(t *testing.T) { + // Get prebuilt configs + alloydb_omni_config, _ := prebuiltconfigs.Get("alloydb-omni") + alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin") + alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres") + bigquery_config, _ := prebuiltconfigs.Get("bigquery") + clickhouse_config, _ := prebuiltconfigs.Get("clickhouse") + cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres") + cloudsqlpg_admin_config, _ := prebuiltconfigs.Get("cloud-sql-postgres-admin") + cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql") + cloudsqlmysql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mysql-admin") + cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql") + cloudsqlmssql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mssql-admin") + dataplex_config, _ := prebuiltconfigs.Get("dataplex") + 
firestoreconfig, _ := prebuiltconfigs.Get("firestore") + mysql_config, _ := prebuiltconfigs.Get("mysql") + mssql_config, _ := prebuiltconfigs.Get("mssql") + looker_config, _ := prebuiltconfigs.Get("looker") + lookerca_config, _ := prebuiltconfigs.Get("looker-conversational-analytics") + postgresconfig, _ := prebuiltconfigs.Get("postgres") + spanner_config, _ := prebuiltconfigs.Get("spanner") + spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres") + mindsdb_config, _ := prebuiltconfigs.Get("mindsdb") + sqlite_config, _ := prebuiltconfigs.Get("sqlite") + neo4jconfig, _ := prebuiltconfigs.Get("neo4j") + alloydbobsvconfig, _ := prebuiltconfigs.Get("alloydb-postgres-observability") + cloudsqlpgobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-postgres-observability") + cloudsqlmysqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mysql-observability") + cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability") + serverless_spark_config, _ := prebuiltconfigs.Get("serverless-spark") + cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare") + snowflake_config, _ := prebuiltconfigs.Get("snowflake") + + // Set environment variables + t.Setenv("API_KEY", "your_api_key") + + t.Setenv("BIGQUERY_PROJECT", "your_gcp_project_id") + t.Setenv("DATAPLEX_PROJECT", "your_gcp_project_id") + t.Setenv("FIRESTORE_PROJECT", "your_gcp_project_id") + t.Setenv("FIRESTORE_DATABASE", "your_firestore_db_name") + + t.Setenv("SPANNER_PROJECT", "your_gcp_project_id") + t.Setenv("SPANNER_INSTANCE", "your_spanner_instance") + t.Setenv("SPANNER_DATABASE", "your_spanner_db") + + t.Setenv("ALLOYDB_POSTGRES_PROJECT", "your_gcp_project_id") + t.Setenv("ALLOYDB_POSTGRES_REGION", "your_gcp_region") + t.Setenv("ALLOYDB_POSTGRES_CLUSTER", "your_alloydb_cluster") + t.Setenv("ALLOYDB_POSTGRES_INSTANCE", "your_alloydb_instance") + t.Setenv("ALLOYDB_POSTGRES_DATABASE", "your_alloydb_db") + t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user") + 
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password") + + t.Setenv("ALLOYDB_OMNI_HOST", "localhost") + t.Setenv("ALLOYDB_OMNI_PORT", "5432") + t.Setenv("ALLOYDB_OMNI_DATABASE", "your_alloydb_db") + t.Setenv("ALLOYDB_OMNI_USER", "your_alloydb_user") + t.Setenv("ALLOYDB_OMNI_PASSWORD", "your_alloydb_password") + + t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol") + t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database") + t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password") + t.Setenv("CLICKHOUSE_USER", "your_clickhouse_user") + t.Setenv("CLICKHOUSE_HOST", "your_clickhosue_host") + t.Setenv("CLICKHOUSE_PORT", "8123") + + t.Setenv("CLOUD_SQL_POSTGRES_PROJECT", "your_pg_project") + t.Setenv("CLOUD_SQL_POSTGRES_INSTANCE", "your_pg_instance") + t.Setenv("CLOUD_SQL_POSTGRES_DATABASE", "your_pg_db") + t.Setenv("CLOUD_SQL_POSTGRES_REGION", "your_pg_region") + t.Setenv("CLOUD_SQL_POSTGRES_USER", "your_pg_user") + t.Setenv("CLOUD_SQL_POSTGRES_PASS", "your_pg_pass") + + t.Setenv("CLOUD_SQL_MYSQL_PROJECT", "your_gcp_project_id") + t.Setenv("CLOUD_SQL_MYSQL_REGION", "your_gcp_region") + t.Setenv("CLOUD_SQL_MYSQL_INSTANCE", "your_instance") + t.Setenv("CLOUD_SQL_MYSQL_DATABASE", "your_cloudsql_mysql_db") + t.Setenv("CLOUD_SQL_MYSQL_USER", "your_cloudsql_mysql_user") + t.Setenv("CLOUD_SQL_MYSQL_PASSWORD", "your_cloudsql_mysql_password") + + t.Setenv("CLOUD_SQL_MSSQL_PROJECT", "your_gcp_project_id") + t.Setenv("CLOUD_SQL_MSSQL_REGION", "your_gcp_region") + t.Setenv("CLOUD_SQL_MSSQL_INSTANCE", "your_cloudsql_mssql_instance") + t.Setenv("CLOUD_SQL_MSSQL_DATABASE", "your_cloudsql_mssql_db") + t.Setenv("CLOUD_SQL_MSSQL_IP_ADDRESS", "127.0.0.1") + t.Setenv("CLOUD_SQL_MSSQL_USER", "your_cloudsql_mssql_user") + t.Setenv("CLOUD_SQL_MSSQL_PASSWORD", "your_cloudsql_mssql_password") + t.Setenv("CLOUD_SQL_POSTGRES_PASSWORD", "your_cloudsql_pg_password") + + t.Setenv("SERVERLESS_SPARK_PROJECT", "your_gcp_project_id") + t.Setenv("SERVERLESS_SPARK_LOCATION", 
"your_gcp_location") + + t.Setenv("POSTGRES_HOST", "localhost") + t.Setenv("POSTGRES_PORT", "5432") + t.Setenv("POSTGRES_DATABASE", "your_postgres_db") + t.Setenv("POSTGRES_USER", "your_postgres_user") + t.Setenv("POSTGRES_PASSWORD", "your_postgres_password") + + t.Setenv("MYSQL_HOST", "localhost") + t.Setenv("MYSQL_PORT", "3306") + t.Setenv("MYSQL_DATABASE", "your_mysql_db") + t.Setenv("MYSQL_USER", "your_mysql_user") + t.Setenv("MYSQL_PASSWORD", "your_mysql_password") + + t.Setenv("MSSQL_HOST", "localhost") + t.Setenv("MSSQL_PORT", "1433") + t.Setenv("MSSQL_DATABASE", "your_mssql_db") + t.Setenv("MSSQL_USER", "your_mssql_user") + t.Setenv("MSSQL_PASSWORD", "your_mssql_password") + + t.Setenv("MINDSDB_HOST", "localhost") + t.Setenv("MINDSDB_PORT", "47334") + t.Setenv("MINDSDB_DATABASE", "your_mindsdb_db") + t.Setenv("MINDSDB_USER", "your_mindsdb_user") + t.Setenv("MINDSDB_PASS", "your_mindsdb_password") + + t.Setenv("LOOKER_BASE_URL", "https://your_company.looker.com") + t.Setenv("LOOKER_CLIENT_ID", "your_looker_client_id") + t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret") + t.Setenv("LOOKER_VERIFY_SSL", "true") + + t.Setenv("LOOKER_PROJECT", "your_project_id") + t.Setenv("LOOKER_LOCATION", "us") + + t.Setenv("SQLITE_DATABASE", "test.db") + + t.Setenv("NEO4J_URI", "bolt://localhost:7687") + t.Setenv("NEO4J_DATABASE", "neo4j") + t.Setenv("NEO4J_USERNAME", "your_neo4j_user") + t.Setenv("NEO4J_PASSWORD", "your_neo4j_password") + + t.Setenv("CLOUD_HEALTHCARE_PROJECT", "your_gcp_project_id") + t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region") + t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset") + + t.Setenv("SNOWFLAKE_ACCOUNT", "your_account") + t.Setenv("SNOWFLAKE_USER", "your_username") + t.Setenv("SNOWFLAKE_PASSWORD", "your_pass") + t.Setenv("SNOWFLAKE_DATABASE", "your_db") + t.Setenv("SNOWFLAKE_SCHEMA", "your_schema") + t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh") + t.Setenv("SNOWFLAKE_ROLE", "your_role") + + ctx, err := 
testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + tcs := []struct { + name string + in []byte + wantToolset server.ToolsetConfigs + }{ + { + name: "alloydb omni prebuilt tools", + in: alloydb_omni_config, + wantToolset: server.ToolsetConfigs{ + "alloydb_omni_database_tools": tools.ToolsetConfig{ + Name: "alloydb_omni_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_columnar_configurations", "list_columnar_recommended_columns", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, + }, + }, + }, + { + name: "alloydb postgres admin prebuilt tools", + in: alloydb_admin_config, + wantToolset: server.ToolsetConfigs{ + "alloydb_postgres_admin_tools": tools.ToolsetConfig{ + Name: "alloydb_postgres_admin_tools", + ToolNames: []string{"create_cluster", "wait_for_operation", "create_instance", "list_clusters", "list_instances", "list_users", "create_user", "get_cluster", "get_instance", "get_user"}, + }, + }, + }, + { + name: "cloudsql pg admin prebuilt tools", + in: cloudsqlpg_admin_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_postgres_admin_tools": tools.ToolsetConfig{ + Name: "cloud_sql_postgres_admin_tools", + ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"}, + }, + }, + 
}, + { + name: "cloudsql mysql admin prebuilt tools", + in: cloudsqlmysql_admin_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mysql_admin_tools": tools.ToolsetConfig{ + Name: "cloud_sql_mysql_admin_tools", + ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"}, + }, + }, + }, + { + name: "cloudsql mssql admin prebuilt tools", + in: cloudsqlmssql_admin_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mssql_admin_tools": tools.ToolsetConfig{ + Name: "cloud_sql_mssql_admin_tools", + ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"}, + }, + }, + }, + { + name: "alloydb prebuilt tools", + in: alloydb_config, + wantToolset: server.ToolsetConfigs{ + "alloydb_postgres_database_tools": tools.ToolsetConfig{ + Name: "alloydb_postgres_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, + }, + }, + }, + { + name: "bigquery prebuilt tools", + in: bigquery_config, + wantToolset: server.ToolsetConfigs{ + "bigquery_database_tools": tools.ToolsetConfig{ + Name: "bigquery_database_tools", + ToolNames: []string{"analyze_contribution", "ask_data_insights", 
"execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids", "search_catalog"}, + }, + }, + }, + { + name: "clickhouse prebuilt tools", + in: clickhouse_config, + wantToolset: server.ToolsetConfigs{ + "clickhouse_database_tools": tools.ToolsetConfig{ + Name: "clickhouse_database_tools", + ToolNames: []string{"execute_sql", "list_databases", "list_tables"}, + }, + }, + }, + { + name: "cloudsqlpg prebuilt tools", + in: cloudsqlpg_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_postgres_database_tools": tools.ToolsetConfig{ + Name: "cloud_sql_postgres_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, + }, + }, + }, + { + name: "cloudsqlmysql prebuilt tools", + in: cloudsqlmysql_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mysql_database_tools": tools.ToolsetConfig{ + Name: "cloud_sql_mysql_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"}, + }, + }, + }, + { + name: "cloudsqlmssql prebuilt tools", + in: cloudsqlmssql_config, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mssql_database_tools": tools.ToolsetConfig{ + Name: "cloud_sql_mssql_database_tools", + ToolNames: []string{"execute_sql", "list_tables"}, + }, + }, + }, + { + name: "dataplex prebuilt tools", + in: 
dataplex_config, + wantToolset: server.ToolsetConfigs{ + "dataplex_tools": tools.ToolsetConfig{ + Name: "dataplex_tools", + ToolNames: []string{"search_entries", "lookup_entry", "search_aspect_types"}, + }, + }, + }, + { + name: "serverless spark prebuilt tools", + in: serverless_spark_config, + wantToolset: server.ToolsetConfigs{ + "serverless_spark_tools": tools.ToolsetConfig{ + Name: "serverless_spark_tools", + ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"}, + }, + }, + }, + { + name: "firestore prebuilt tools", + in: firestoreconfig, + wantToolset: server.ToolsetConfigs{ + "firestore_database_tools": tools.ToolsetConfig{ + Name: "firestore_database_tools", + ToolNames: []string{"get_documents", "add_documents", "update_document", "list_collections", "delete_documents", "query_collection", "get_rules", "validate_rules"}, + }, + }, + }, + { + name: "mysql prebuilt tools", + in: mysql_config, + wantToolset: server.ToolsetConfigs{ + "mysql_database_tools": tools.ToolsetConfig{ + Name: "mysql_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"}, + }, + }, + }, + { + name: "mssql prebuilt tools", + in: mssql_config, + wantToolset: server.ToolsetConfigs{ + "mssql_database_tools": tools.ToolsetConfig{ + Name: "mssql_database_tools", + ToolNames: []string{"execute_sql", "list_tables"}, + }, + }, + }, + { + name: "looker prebuilt tools", + in: looker_config, + wantToolset: server.ToolsetConfigs{ + "looker_tools": tools.ToolsetConfig{ + Name: "looker_tools", + ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", 
"health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "validate_project", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"}, + }, + }, + }, + { + name: "looker-conversational-analytics prebuilt tools", + in: lookerca_config, + wantToolset: server.ToolsetConfigs{ + "looker_conversational_analytics_tools": tools.ToolsetConfig{ + Name: "looker_conversational_analytics_tools", + ToolNames: []string{"ask_data_insights", "get_models", "get_explores"}, + }, + }, + }, + { + name: "postgres prebuilt tools", + in: postgresconfig, + wantToolset: server.ToolsetConfigs{ + "postgres_database_tools": tools.ToolsetConfig{ + Name: "postgres_database_tools", + ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, + }, + }, + }, + { + name: "spanner prebuilt tools", + in: spanner_config, + wantToolset: server.ToolsetConfigs{ + "spanner-database-tools": tools.ToolsetConfig{ + Name: "spanner-database-tools", + ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables", "list_graphs"}, + }, + }, + }, + { + name: "spanner pg prebuilt tools", + in: spannerpg_config, + wantToolset: server.ToolsetConfigs{ + "spanner_postgres_database_tools": tools.ToolsetConfig{ + Name: 
"spanner_postgres_database_tools", + ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"}, + }, + }, + }, + { + name: "mindsdb prebuilt tools", + in: mindsdb_config, + wantToolset: server.ToolsetConfigs{ + "mindsdb-tools": tools.ToolsetConfig{ + Name: "mindsdb-tools", + ToolNames: []string{"mindsdb-execute-sql", "mindsdb-sql"}, + }, + }, + }, + { + name: "sqlite prebuilt tools", + in: sqlite_config, + wantToolset: server.ToolsetConfigs{ + "sqlite_database_tools": tools.ToolsetConfig{ + Name: "sqlite_database_tools", + ToolNames: []string{"execute_sql", "list_tables"}, + }, + }, + }, + { + name: "neo4j prebuilt tools", + in: neo4jconfig, + wantToolset: server.ToolsetConfigs{ + "neo4j_database_tools": tools.ToolsetConfig{ + Name: "neo4j_database_tools", + ToolNames: []string{"execute_cypher", "get_schema"}, + }, + }, + }, + { + name: "alloydb postgres observability prebuilt tools", + in: alloydbobsvconfig, + wantToolset: server.ToolsetConfigs{ + "alloydb_postgres_cloud_monitoring_tools": tools.ToolsetConfig{ + Name: "alloydb_postgres_cloud_monitoring_tools", + ToolNames: []string{"get_system_metrics", "get_query_metrics"}, + }, + }, + }, + { + name: "cloudsql postgres observability prebuilt tools", + in: cloudsqlpgobsvconfig, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_postgres_cloud_monitoring_tools": tools.ToolsetConfig{ + Name: "cloud_sql_postgres_cloud_monitoring_tools", + ToolNames: []string{"get_system_metrics", "get_query_metrics"}, + }, + }, + }, + { + name: "cloudsql mysql observability prebuilt tools", + in: cloudsqlmysqlobsvconfig, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mysql_cloud_monitoring_tools": tools.ToolsetConfig{ + Name: "cloud_sql_mysql_cloud_monitoring_tools", + ToolNames: []string{"get_system_metrics", "get_query_metrics"}, + }, + }, + }, + { + name: "cloudsql mssql observability prebuilt tools", + in: cloudsqlmssqlobsvconfig, + wantToolset: server.ToolsetConfigs{ + "cloud_sql_mssql_cloud_monitoring_tools": 
tools.ToolsetConfig{ + Name: "cloud_sql_mssql_cloud_monitoring_tools", + ToolNames: []string{"get_system_metrics"}, + }, + }, + }, + { + name: "cloud healthcare prebuilt tools", + in: cloudhealthcare_config, + wantToolset: server.ToolsetConfigs{ + "cloud_healthcare_dataset_tools": tools.ToolsetConfig{ + Name: "cloud_healthcare_dataset_tools", + ToolNames: []string{"get_dataset", "list_dicom_stores", "list_fhir_stores"}, + }, + "cloud_healthcare_fhir_tools": tools.ToolsetConfig{ + Name: "cloud_healthcare_fhir_tools", + ToolNames: []string{"get_fhir_store", "get_fhir_store_metrics", "get_fhir_resource", "fhir_patient_search", "fhir_patient_everything", "fhir_fetch_page"}, + }, + "cloud_healthcare_dicom_tools": tools.ToolsetConfig{ + Name: "cloud_healthcare_dicom_tools", + ToolNames: []string{"get_dicom_store", "get_dicom_store_metrics", "search_dicom_studies", "search_dicom_series", "search_dicom_instances", "retrieve_rendered_dicom_instance"}, + }, + }, + }, + { + name: "Snowflake prebuilt tool", + in: snowflake_config, + wantToolset: server.ToolsetConfigs{ + "snowflake_tools": tools.ToolsetConfig{ + Name: "snowflake_tools", + ToolNames: []string{"execute_sql", "list_tables"}, + }, + }, + }, + } + + for _, tc := range tcs { + t.Run(tc.name, func(t *testing.T) { + toolsFile, err := parseToolsFile(ctx, tc.in) + if err != nil { + t.Fatalf("failed to parse input: %v", err) + } + if diff := cmp.Diff(tc.wantToolset, toolsFile.Toolsets); diff != "" { + t.Fatalf("incorrect tools parse: diff %v", diff) + } + // Prebuilt configs do not have prompts, so assert empty maps. 
+ if len(toolsFile.Prompts) != 0 { + t.Fatalf("expected empty prompts map for prebuilt config, got: %v", toolsFile.Prompts) + } + }) + } +} + +func TestMergeToolsFiles(t *testing.T) { + file1 := ToolsFile{ + Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, + Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}}, + Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}}, + EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, + } + file2 := ToolsFile{ + AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}}, + Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}}, + Toolsets: server.ToolsetConfigs{"set2": tools.ToolsetConfig{Name: "set2"}}, + } + fileWithConflicts := ToolsFile{ + Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, + Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}}, + } + + testCases := []struct { + name string + files []ToolsFile + want ToolsFile + wantErr bool + }{ + { + name: "merge two distinct files", + files: []ToolsFile{file1, file2}, + want: ToolsFile{ + Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, + AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}}, + Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}}, + Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}}, + Prompts: server.PromptConfigs{}, + EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, + }, + wantErr: false, + }, + { + name: "merge with conflicts", + files: []ToolsFile{file1, file2, fileWithConflicts}, + wantErr: true, + }, + { + name: "merge single file", + files: []ToolsFile{file1}, + want: ToolsFile{ + Sources: file1.Sources, + AuthServices: make(server.AuthServiceConfigs), + EmbeddingModels: 
server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, + Tools: file1.Tools, + Toolsets: file1.Toolsets, + Prompts: server.PromptConfigs{}, + }, + }, + { + name: "merge empty list", + files: []ToolsFile{}, + want: ToolsFile{ + Sources: make(server.SourceConfigs), + AuthServices: make(server.AuthServiceConfigs), + EmbeddingModels: make(server.EmbeddingModelConfigs), + Tools: make(server.ToolConfigs), + Toolsets: make(server.ToolsetConfigs), + Prompts: server.PromptConfigs{}, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + got, err := mergeToolsFiles(tc.files...) + if (err != nil) != tc.wantErr { + t.Fatalf("mergeToolsFiles() error = %v, wantErr %v", err, tc.wantErr) + } + if !tc.wantErr { + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("mergeToolsFiles() mismatch (-want +got):\n%s", diff) + } + } else { + if err == nil { + t.Fatal("expected an error for conflicting files but got none") + } + if !strings.Contains(err.Error(), "resource conflicts detected") { + t.Errorf("expected conflict error, but got: %v", err) + } + } + }) + } +} + +func TestParameterReferenceValidation(t *testing.T) { + ctx, err := testutils.ContextWithNewLogger() + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + + // Base template + baseYaml := ` +sources: + dummy-source: + kind: http + baseUrl: http://example.com +tools: + test-tool: + kind: postgres-sql + source: dummy-source + description: test tool + statement: SELECT 1; + parameters: +%s` + + tcs := []struct { + desc string + params string + wantErr bool + errSubstr string + }{ + { + desc: "valid backward reference", + params: ` + - name: source_param + type: string + description: source + - name: copy_param + type: string + description: copy + valueFromParam: source_param`, + wantErr: false, + }, + { + desc: "valid forward reference (out of order)", + params: ` + - name: copy_param + type: string + description: copy + valueFromParam: source_param + - 
name: source_param + type: string + description: source`, + wantErr: false, + }, + { + desc: "invalid missing reference", + params: ` + - name: copy_param + type: string + description: copy + valueFromParam: non_existent_param`, + wantErr: true, + errSubstr: "references '\"non_existent_param\"' in the 'valueFromParam' field", + }, + { + desc: "invalid self reference", + params: ` + - name: myself + type: string + description: self + valueFromParam: myself`, + wantErr: true, + errSubstr: "parameter \"myself\" cannot copy value from itself", + }, + { + desc: "multiple valid references", + params: ` + - name: a + type: string + description: a + - name: b + type: string + description: b + valueFromParam: a + - name: c + type: string + description: c + valueFromParam: a`, + wantErr: false, + }, + } + + for _, tc := range tcs { + t.Run(tc.desc, func(t *testing.T) { + // Indent parameters to match YAML structure + yamlContent := fmt.Sprintf(baseYaml, tc.params) + + _, err := parseToolsFile(ctx, []byte(yamlContent)) + + if tc.wantErr { + if err == nil { + t.Fatal("expected error, got nil") + } + if !strings.Contains(err.Error(), tc.errSubstr) { + t.Errorf("error %q does not contain expected substring %q", err.Error(), tc.errSubstr) + } + } else { + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + } + }) + } +} diff --git a/cmd/options.go b/cmd/options.go deleted file mode 100644 index b87a7e6d55..0000000000 --- a/cmd/options.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "io" -) - -// Option is a function that configures a Command. -type Option func(*Command) - -// WithStreams overrides the default writer. -func WithStreams(out, err io.Writer) Option { - return func(c *Command) { - c.outStream = out - c.errStream = err - } -} diff --git a/cmd/root.go b/cmd/root.go index dfac1c250f..33383366ea 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -23,7 +23,6 @@ import ( "os" "os/signal" "path/filepath" - "regexp" "runtime" "slices" "strings" @@ -31,249 +30,18 @@ import ( "time" "github.com/fsnotify/fsnotify" - yaml "github.com/goccy/go-yaml" + // Importing the cmd/internal package also import packages for side effect of registration + "github.com/googleapis/genai-toolbox/cmd/internal" + "github.com/googleapis/genai-toolbox/cmd/internal/invoke" + "github.com/googleapis/genai-toolbox/cmd/internal/skills" "github.com/googleapis/genai-toolbox/internal/auth" "github.com/googleapis/genai-toolbox/internal/embeddingmodels" - "github.com/googleapis/genai-toolbox/internal/log" - "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server" "github.com/googleapis/genai-toolbox/internal/sources" - "github.com/googleapis/genai-toolbox/internal/telemetry" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" - - // Import prompt packages for side effect of registration - _ "github.com/googleapis/genai-toolbox/internal/prompts/custom" - - // Import tool packages for side effect of registration - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation" - _ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql" - _ "github.com/googleapis/genai-toolbox/internal/tools/bigtable" - _ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql" - _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases" - _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances" - _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck" - _ "github.com/googleapis/genai-toolbox/internal/tools/couchbase" - _ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal" - _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry" - _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes" - _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries" - _ "github.com/googleapis/genai-toolbox/internal/tools/dgraph" - _ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument" - _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules" - _ "github.com/googleapis/genai-toolbox/internal/tools/http" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores" - 
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook" - _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile" - _ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany" - _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone" - _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables" - _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes" - _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql" - _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher" - _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher" - _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema" - _ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions" - _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats" - _ 
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql" - _ "github.com/googleapis/genai-toolbox/internal/tools/redis" - _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch" - _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch" - _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch" - _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch" - _ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches" - _ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql" - _ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs" - _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables" - _ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql" - _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql" - _ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql" - _ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql" - _ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait" - _ "github.com/googleapis/genai-toolbox/internal/tools/valkey" - _ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql" - "github.com/spf13/cobra" - 
- _ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin" - _ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg" - _ "github.com/googleapis/genai-toolbox/internal/sources/bigquery" - _ "github.com/googleapis/genai-toolbox/internal/sources/bigtable" - _ "github.com/googleapis/genai-toolbox/internal/sources/cassandra" - _ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql" - _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg" - _ "github.com/googleapis/genai-toolbox/internal/sources/couchbase" - _ "github.com/googleapis/genai-toolbox/internal/sources/dataplex" - _ "github.com/googleapis/genai-toolbox/internal/sources/dgraph" - _ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch" - _ "github.com/googleapis/genai-toolbox/internal/sources/firebird" - _ "github.com/googleapis/genai-toolbox/internal/sources/firestore" - _ "github.com/googleapis/genai-toolbox/internal/sources/http" - _ "github.com/googleapis/genai-toolbox/internal/sources/looker" - _ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb" - _ "github.com/googleapis/genai-toolbox/internal/sources/mongodb" - _ "github.com/googleapis/genai-toolbox/internal/sources/mssql" - _ "github.com/googleapis/genai-toolbox/internal/sources/mysql" - _ "github.com/googleapis/genai-toolbox/internal/sources/neo4j" - _ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase" - _ "github.com/googleapis/genai-toolbox/internal/sources/oracle" - _ "github.com/googleapis/genai-toolbox/internal/sources/postgres" - 
_ "github.com/googleapis/genai-toolbox/internal/sources/redis" - _ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark" - _ "github.com/googleapis/genai-toolbox/internal/sources/singlestore" - _ "github.com/googleapis/genai-toolbox/internal/sources/snowflake" - _ "github.com/googleapis/genai-toolbox/internal/sources/spanner" - _ "github.com/googleapis/genai-toolbox/internal/sources/sqlite" - _ "github.com/googleapis/genai-toolbox/internal/sources/tidb" - _ "github.com/googleapis/genai-toolbox/internal/sources/trino" - _ "github.com/googleapis/genai-toolbox/internal/sources/valkey" - _ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb" ) var ( @@ -302,303 +70,74 @@ func semanticVersion() string { return v } +// GenerateCommand returns a new Command object with the specified IO streams +// This is used for integration test package +func GenerateCommand(out, err io.Writer) *cobra.Command { + opts := internal.NewToolboxOptions(internal.WithIOStreams(out, err)) + return NewCommand(opts) +} + // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { - if err := NewCommand().Execute(); err != nil { + // Initialize options + opts := internal.NewToolboxOptions() + + if err := NewCommand(opts).Execute(); err != nil { exit := 1 os.Exit(exit) } } -// Command represents an invocation of the CLI. -type Command struct { - *cobra.Command - - cfg server.ServerConfig - logger log.Logger - tools_file string - tools_files []string - tools_folder string - prebuiltConfigs []string - inStream io.Reader - outStream io.Writer - errStream io.Writer -} - // NewCommand returns a Command object representing an invocation of the CLI. 
-func NewCommand(opts ...Option) *Command { - in := os.Stdin - out := os.Stdout - err := os.Stderr - - baseCmd := &cobra.Command{ +func NewCommand(opts *internal.ToolboxOptions) *cobra.Command { + cmd := &cobra.Command{ Use: "toolbox", Version: versionString, SilenceErrors: true, } - cmd := &Command{ - Command: baseCmd, - inStream: in, - outStream: out, - errStream: err, - } - - for _, o := range opts { - o(cmd) - } // Do not print Usage on runtime error cmd.SilenceUsage = true // Set server version - cmd.cfg.Version = versionString + opts.Cfg.Version = versionString // set baseCmd in, out and err the same as cmd. - baseCmd.SetIn(cmd.inStream) - baseCmd.SetOut(cmd.outStream) - baseCmd.SetErr(cmd.errStream) + cmd.SetIn(opts.IOStreams.In) + cmd.SetOut(opts.IOStreams.Out) + cmd.SetErr(opts.IOStreams.ErrOut) + + // setup flags that are common across all commands + internal.PersistentFlags(cmd, opts) flags := cmd.Flags() - flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.") - flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.") - flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.") + flags.StringVarP(&opts.Cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.") + flags.IntVarP(&opts.Cfg.Port, "port", "p", 5000, "Port the server will listen on.") + + flags.StringVar(&opts.ToolsFile, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.") // deprecate tools_file _ = flags.MarkDeprecated("tools_file", "please use --tools-file instead") - flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. 
Cannot be used with --tools-files, or --tools-folder.") - flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.") - flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.") - flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.") - flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.") - flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.") - flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')") - flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.") - // Fetch prebuilt tools sources to customize the help description - prebuiltHelp := fmt.Sprintf( - "Use a prebuilt tool configuration by source type. Allowed: '%s'. 
Can be specified multiple times.", - strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"), - ) - flags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp) - flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.") - flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.") - flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.") + flags.BoolVar(&opts.Cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.") + flags.BoolVar(&opts.Cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.") + flags.BoolVar(&opts.Cfg.UI, "ui", false, "Launches the Toolbox UI web server.") // TODO: Insecure by default. Might consider updating this for v1.0.0 - flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.") - flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.") - flags.StringSliceVar(&cmd.cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.") + flags.StringSliceVar(&opts.Cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.") + flags.StringSliceVar(&opts.Cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. 
Defaults to '*'.") // wrap RunE command so that we have access to original Command object - cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) } + cmd.RunE = func(*cobra.Command, []string) error { return run(cmd, opts) } + + // Register subcommands for tool invocation + cmd.AddCommand(invoke.NewCommand(opts)) + // Register subcommands for skill generation + cmd.AddCommand(skills.NewCommand(opts)) return cmd } -type ToolsFile struct { - Sources server.SourceConfigs `yaml:"sources"` - AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility. - AuthServices server.AuthServiceConfigs `yaml:"authServices"` - EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"` - Tools server.ToolConfigs `yaml:"tools"` - Toolsets server.ToolsetConfigs `yaml:"toolsets"` - Prompts server.PromptConfigs `yaml:"prompts"` -} - -// parseEnv replaces environment variables ${ENV_NAME} with their values. -// also support ${ENV_NAME:default_value}. -func parseEnv(input string) (string, error) { - re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`) - - var err error - output := re.ReplaceAllStringFunc(input, func(match string) string { - parts := re.FindStringSubmatch(match) - - // extract the variable name - variableName := parts[1] - if value, found := os.LookupEnv(variableName); found { - return value - } - if len(parts) >= 4 && parts[2] != "" { - return parts[3] - } - err = fmt.Errorf("environment variable not found: %q", variableName) - return "" - }) - return output, err -} - -// parseToolsFile parses the provided yaml into appropriate configs. 
-func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) { - var toolsFile ToolsFile - // Replace environment variables if found - output, err := parseEnv(string(raw)) - if err != nil { - return toolsFile, fmt.Errorf("error parsing environment variables: %s", err) - } - raw = []byte(output) - - // Parse contents - err = yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict()) - if err != nil { - return toolsFile, err - } - return toolsFile, nil -} - -// mergeToolsFiles merges multiple ToolsFile structs into one. -// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets. -// All resource names (sources, authServices, tools, toolsets) must be unique across all files. -func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) { - merged := ToolsFile{ - Sources: make(server.SourceConfigs), - AuthServices: make(server.AuthServiceConfigs), - EmbeddingModels: make(server.EmbeddingModelConfigs), - Tools: make(server.ToolConfigs), - Toolsets: make(server.ToolsetConfigs), - Prompts: make(server.PromptConfigs), - } - - var conflicts []string - - for fileIndex, file := range files { - // Check for conflicts and merge sources - for name, source := range file.Sources { - if _, exists := merged.Sources[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.Sources[name] = source - } - } - - // Check for conflicts and merge authSources (deprecated, but still support) - for name, authSource := range file.AuthSources { - if _, exists := merged.AuthSources[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1)) - } else { - if merged.AuthSources == nil { - merged.AuthSources = make(server.AuthServiceConfigs) - } - merged.AuthSources[name] = authSource - } - } - - // Check for conflicts and merge authServices - for name, authService := range file.AuthServices { - if _, exists := 
merged.AuthServices[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.AuthServices[name] = authService - } - } - - // Check for conflicts and merge embeddingModels - for name, em := range file.EmbeddingModels { - if _, exists := merged.EmbeddingModels[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.EmbeddingModels[name] = em - } - } - - // Check for conflicts and merge tools - for name, tool := range file.Tools { - if _, exists := merged.Tools[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.Tools[name] = tool - } - } - - // Check for conflicts and merge toolsets - for name, toolset := range file.Toolsets { - if _, exists := merged.Toolsets[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.Toolsets[name] = toolset - } - } - - // Check for conflicts and merge prompts - for name, prompt := range file.Prompts { - if _, exists := merged.Prompts[name]; exists { - conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1)) - } else { - merged.Prompts[name] = prompt - } - } - } - - // If conflicts were detected, return an error - if len(conflicts) > 0 { - return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - ")) - } - - return merged, nil -} - -// loadAndMergeToolsFiles loads multiple YAML files and merges them -func loadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) { - var toolsFiles []ToolsFile - - for _, filePath := range filePaths { - buf, err := os.ReadFile(filePath) - if err != nil { - return ToolsFile{}, fmt.Errorf("unable to read tool file at 
%q: %w", filePath, err) - } - - toolsFile, err := parseToolsFile(ctx, buf) - if err != nil { - return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err) - } - - toolsFiles = append(toolsFiles, toolsFile) - } - - mergedFile, err := mergeToolsFiles(toolsFiles...) - if err != nil { - return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err) - } - - return mergedFile, nil -} - -// loadAndMergeToolsFolder loads all YAML files from a directory and merges them -func loadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) { - // Check if directory exists - info, err := os.Stat(folderPath) - if err != nil { - return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err) - } - if !info.IsDir() { - return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath) - } - - // Find all YAML files in the directory - pattern := filepath.Join(folderPath, "*.yaml") - yamlFiles, err := filepath.Glob(pattern) - if err != nil { - return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err) - } - - // Also find .yml files - ymlPattern := filepath.Join(folderPath, "*.yml") - ymlFiles, err := filepath.Glob(ymlPattern) - if err != nil { - return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err) - } - - // Combine both file lists - allFiles := append(yamlFiles, ymlFiles...) 
- - if len(allFiles) == 0 { - return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath) - } - - // Use existing loadAndMergeToolsFiles function - return loadAndMergeToolsFiles(ctx, allFiles) -} - -func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Server) error { +func handleDynamicReload(ctx context.Context, toolsFile internal.ToolsFile, s *server.Server) error { logger, err := util.LoggerFromContext(ctx) if err != nil { panic(err) @@ -618,7 +157,7 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser // validateReloadEdits checks that the reloaded tools file configs can initialized without failing func validateReloadEdits( - ctx context.Context, toolsFile ToolsFile, + ctx context.Context, toolsFile internal.ToolsFile, ) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error, ) { logger, err := util.LoggerFromContext(ctx) @@ -744,18 +283,18 @@ func watchChanges(ctx context.Context, watchDirs map[string]bool, watchedFiles m case <-debounce.C: debounce.Stop() - var reloadedToolsFile ToolsFile + var reloadedToolsFile internal.ToolsFile if watchingFolder { logger.DebugContext(ctx, "Reloading tools folder.") - reloadedToolsFile, err = loadAndMergeToolsFolder(ctx, folderToWatch) + reloadedToolsFile, err = internal.LoadAndMergeToolsFolder(ctx, folderToWatch) if err != nil { logger.WarnContext(ctx, "error loading tools folder %s", err) continue } } else { logger.DebugContext(ctx, "Reloading tools file(s).") - reloadedToolsFile, err = loadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles))) + reloadedToolsFile, err = internal.LoadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles))) if err != nil { logger.WarnContext(ctx, "error loading tools files %s", err) continue @@ -799,7 +338,7 @@ func 
resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder str return watchDirs, watchedFiles } -func run(cmd *Command) error { +func run(cmd *cobra.Command, opts *internal.ToolboxOptions) error { ctx, cancel := context.WithCancel(cmd.Context()) defer cancel() @@ -816,192 +355,40 @@ func run(cmd *Command) error { } switch s { case syscall.SIGINT: - cmd.logger.DebugContext(sCtx, "Received SIGINT signal to shutdown.") + opts.Logger.DebugContext(sCtx, "Received SIGINT signal to shutdown.") case syscall.SIGTERM: - cmd.logger.DebugContext(sCtx, "Sending SIGTERM signal to shutdown.") + opts.Logger.DebugContext(sCtx, "Sending SIGTERM signal to shutdown.") } cancel() }(ctx) - // If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream - loggerOut := cmd.outStream - if cmd.cfg.Stdio { - loggerOut = cmd.errStream - } - - // Handle logger separately from config - switch strings.ToLower(cmd.cfg.LoggingFormat.String()) { - case "json": - logger, err := log.NewStructuredLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String()) - if err != nil { - return fmt.Errorf("unable to initialize logger: %w", err) - } - cmd.logger = logger - case "standard": - logger, err := log.NewStdLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String()) - if err != nil { - return fmt.Errorf("unable to initialize logger: %w", err) - } - cmd.logger = logger - default: - return fmt.Errorf("logging format invalid") - } - - ctx = util.WithLogger(ctx, cmd.logger) - - // Set up OpenTelemetry - otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName) + ctx, shutdown, err := opts.Setup(ctx) if err != nil { - errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg + return err } defer func() { - err := otelShutdown(ctx) - if err != nil { - errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err) - 
cmd.logger.ErrorContext(ctx, errMsg.Error()) - } + _ = shutdown(ctx) }() - var allToolsFiles []ToolsFile - - // Load Prebuilt Configuration - - if len(cmd.prebuiltConfigs) > 0 { - slices.Sort(cmd.prebuiltConfigs) - sourcesList := strings.Join(cmd.prebuiltConfigs, ", ") - logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList) - cmd.logger.InfoContext(ctx, logMsg) - - for _, configName := range cmd.prebuiltConfigs { - buf, err := prebuiltconfigs.Get(configName) - if err != nil { - cmd.logger.ErrorContext(ctx, err.Error()) - return err - } - - // Update version string - cmd.cfg.Version += "+prebuilt." + configName - - // Parse into ToolsFile struct - parsed, err := parseToolsFile(ctx, buf) - if err != nil { - errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg - } - allToolsFiles = append(allToolsFiles, parsed) - } - } - - // Determine if Custom Files should be loaded - // Check for explicit custom flags - isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != "" - - // Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags) - useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured - - if useDefaultToolsFile { - cmd.tools_file = "tools.yaml" - isCustomConfigured = true - } - - // Load Custom Configurations - if isCustomConfigured { - // Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder) - if (cmd.tools_file != "" && len(cmd.tools_files) > 0) || - (cmd.tools_file != "" && cmd.tools_folder != "") || - (len(cmd.tools_files) > 0 && cmd.tools_folder != "") { - errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously") - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg - } - - var customTools ToolsFile - var err error - - if len(cmd.tools_files) > 0 { - // Use 
tools-files - cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files))) - customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files) - } else if cmd.tools_folder != "" { - // Use tools-folder - cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder)) - customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder) - } else { - // Use single file (tools-file or default `tools.yaml`) - buf, readFileErr := os.ReadFile(cmd.tools_file) - if readFileErr != nil { - errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr) - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg - } - customTools, err = parseToolsFile(ctx, buf) - if err != nil { - err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err) - } - } - - if err != nil { - cmd.logger.ErrorContext(ctx, err.Error()) - return err - } - allToolsFiles = append(allToolsFiles, customTools) - } - - // Merge Everything - // This will error if custom tools collide with prebuilt tools - finalToolsFile, err := mergeToolsFiles(allToolsFiles...) 
+ isCustomConfigured, err := opts.LoadConfig(ctx) if err != nil { - cmd.logger.ErrorContext(ctx, err.Error()) return err } - cmd.cfg.SourceConfigs = finalToolsFile.Sources - cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices - cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels - cmd.cfg.ToolConfigs = finalToolsFile.Tools - cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets - cmd.cfg.PromptConfigs = finalToolsFile.Prompts - - authSourceConfigs := finalToolsFile.AuthSources - if authSourceConfigs != nil { - cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead") - - for k, v := range authSourceConfigs { - if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists { - errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k) - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg - } - cmd.cfg.AuthServiceConfigs[k] = v - } - } - - instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString) - if err != nil { - errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) - return errMsg - } - - ctx = util.WithInstrumentation(ctx, instrumentation) - // start server - s, err := server.NewServer(ctx, cmd.cfg) + s, err := server.NewServer(ctx, opts.Cfg) if err != nil { errMsg := fmt.Errorf("toolbox failed to initialize: %w", err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) + opts.Logger.ErrorContext(ctx, errMsg.Error()) return errMsg } // run server in background srvErr := make(chan error) - if cmd.cfg.Stdio { + if opts.Cfg.Stdio { go func() { defer close(srvErr) - err = s.ServeStdio(ctx, cmd.inStream, cmd.outStream) + err = s.ServeStdio(ctx, opts.IOStreams.In, opts.IOStreams.Out) if err != nil { srvErr <- err } @@ -1010,12 +397,12 @@ func run(cmd *Command) error { err = s.Listen(ctx) if err != nil { errMsg := fmt.Errorf("toolbox failed to start listener: 
%w", err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) + opts.Logger.ErrorContext(ctx, errMsg.Error()) return errMsg } - cmd.logger.InfoContext(ctx, "Server ready to serve!") - if cmd.cfg.UI { - cmd.logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", cmd.cfg.Address, cmd.cfg.Port)) + opts.Logger.InfoContext(ctx, "Server ready to serve!") + if opts.Cfg.UI { + opts.Logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", opts.Cfg.Address, opts.Cfg.Port)) } go func() { @@ -1027,8 +414,8 @@ func run(cmd *Command) error { }() } - if isCustomConfigured && !cmd.cfg.DisableReload { - watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder) + if isCustomConfigured && !opts.Cfg.DisableReload { + watchDirs, watchedFiles := resolveWatcherInputs(opts.ToolsFile, opts.ToolsFiles, opts.ToolsFolder) // start watching the file(s) or folder for changes to trigger dynamic reloading go watchChanges(ctx, watchDirs, watchedFiles, s) } @@ -1038,13 +425,13 @@ func run(cmd *Command) error { case err := <-srvErr: if err != nil { errMsg := fmt.Errorf("toolbox crashed with the following error: %w", err) - cmd.logger.ErrorContext(ctx, errMsg.Error()) + opts.Logger.ErrorContext(ctx, errMsg.Error()) return errMsg } case <-ctx.Done(): shutdownContext, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() - cmd.logger.WarnContext(shutdownContext, "Shutting down gracefully...") + opts.Logger.WarnContext(shutdownContext, "Shutting down gracefully...") err := s.Shutdown(shutdownContext) if err == context.DeadlineExceeded { return fmt.Errorf("graceful shutdown timed out... 
forcing exit") diff --git a/cmd/root_test.go b/cmd/root_test.go index 17058d18ff..e85aaa3d26 100644 --- a/cmd/root_test.go +++ b/cmd/root_test.go @@ -31,22 +31,12 @@ import ( "github.com/google/go-cmp/cmp" - "github.com/googleapis/genai-toolbox/internal/auth/google" - "github.com/googleapis/genai-toolbox/internal/embeddingmodels/gemini" + "github.com/googleapis/genai-toolbox/cmd/internal" "github.com/googleapis/genai-toolbox/internal/log" - "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs" - "github.com/googleapis/genai-toolbox/internal/prompts" - "github.com/googleapis/genai-toolbox/internal/prompts/custom" "github.com/googleapis/genai-toolbox/internal/server" - cloudsqlpgsrc "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg" - httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http" "github.com/googleapis/genai-toolbox/internal/telemetry" "github.com/googleapis/genai-toolbox/internal/testutils" - "github.com/googleapis/genai-toolbox/internal/tools" - "github.com/googleapis/genai-toolbox/internal/tools/http" - "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql" "github.com/googleapis/genai-toolbox/internal/util" - "github.com/googleapis/genai-toolbox/internal/util/parameters" "github.com/spf13/cobra" ) @@ -76,15 +66,16 @@ func withDefaults(c server.ServerConfig) server.ServerConfig { return c } -func invokeCommand(args []string) (*Command, string, error) { - c := NewCommand() +func invokeCommand(args []string) (*cobra.Command, *internal.ToolboxOptions, string, error) { + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + c := NewCommand(opts) // Keep the test output quiet c.SilenceUsage = true c.SilenceErrors = true // Capture output - buf := new(bytes.Buffer) c.SetOut(buf) c.SetErr(buf) c.SetArgs(args) @@ -96,22 +87,23 @@ func invokeCommand(args []string) (*Command, string, error) { err := c.Execute() - return c, buf.String(), err + return c, opts, 
buf.String(), err } // invokeCommandWithContext executes the command with a context and returns the captured output. -func invokeCommandWithContext(ctx context.Context, args []string) (*Command, string, error) { - // Capture output using a buffer +func invokeCommandWithContext(ctx context.Context, args []string) (*cobra.Command, *internal.ToolboxOptions, string, error) { buf := new(bytes.Buffer) - c := NewCommand(WithStreams(buf, buf)) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + c := NewCommand(opts) + // Capture output using a buffer c.SetArgs(args) c.SilenceUsage = true c.SilenceErrors = true c.SetContext(ctx) err := c.Execute() - return c, buf.String(), err + return c, opts, buf.String(), err } func TestVersion(t *testing.T) { @@ -121,7 +113,7 @@ func TestVersion(t *testing.T) { } want := strings.TrimSpace(string(data)) - _, got, err := invokeCommand([]string{"--version"}) + _, _, got, err := invokeCommand([]string{"--version"}) if err != nil { t.Fatalf("error invoking command: %s", err) } @@ -243,79 +235,13 @@ func TestServerConfigFlags(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - c, _, err := invokeCommand(tc.args) + _, opts, _, err := invokeCommand(tc.args) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - if !cmp.Equal(c.cfg, tc.want) { - t.Fatalf("got %v, want %v", c.cfg, tc.want) - } - }) - } -} - -func TestParseEnv(t *testing.T) { - tcs := []struct { - desc string - env map[string]string - in string - want string - err bool - errString string - }{ - { - desc: "without default without env", - in: "${FOO}", - want: "", - err: true, - errString: `environment variable not found: "FOO"`, - }, - { - desc: "without default with env", - env: map[string]string{ - "FOO": "bar", - }, - in: "${FOO}", - want: "bar", - }, - { - desc: "with empty default", - in: "${FOO:}", - want: "", - }, - { - desc: "with default", - in: "${FOO:bar}", - want: "bar", - }, - { - desc: "with default 
with env", - env: map[string]string{ - "FOO": "hello", - }, - in: "${FOO:bar}", - want: "hello", - }, - } - for _, tc := range tcs { - t.Run(tc.desc, func(t *testing.T) { - if tc.env != nil { - for k, v := range tc.env { - t.Setenv(k, v) - } - } - got, err := parseEnv(tc.in) - if tc.err { - if err == nil { - t.Fatalf("expected error not found") - } - if tc.errString != err.Error() { - t.Fatalf("incorrect error string: got %s, want %s", err, tc.errString) - } - } - if tc.want != got { - t.Fatalf("unexpected want: got %s, want %s", got, tc.want) + if !cmp.Equal(opts.Cfg, tc.want) { + t.Fatalf("got %v, want %v", opts.Cfg, tc.want) } }) } @@ -350,12 +276,12 @@ func TestToolFileFlag(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - c, _, err := invokeCommand(tc.args) + _, opts, _, err := invokeCommand(tc.args) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - if c.tools_file != tc.want { - t.Fatalf("got %v, want %v", c.cfg, tc.want) + if opts.ToolsFile != tc.want { + t.Fatalf("got %v, want %v", opts.Cfg, tc.want) } }) } @@ -385,12 +311,12 @@ func TestToolsFilesFlag(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - c, _, err := invokeCommand(tc.args) + _, opts, _, err := invokeCommand(tc.args) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - if diff := cmp.Diff(c.tools_files, tc.want); diff != "" { - t.Fatalf("got %v, want %v", c.tools_files, tc.want) + if diff := cmp.Diff(opts.ToolsFiles, tc.want); diff != "" { + t.Fatalf("got %v, want %v", opts.ToolsFiles, tc.want) } }) } @@ -415,12 +341,12 @@ func TestToolsFolderFlag(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - c, _, err := invokeCommand(tc.args) + _, opts, _, err := invokeCommand(tc.args) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - if c.tools_folder != tc.want { - t.Fatalf("got %v, want %v", c.tools_folder, tc.want) + if opts.ToolsFolder != 
tc.want { + t.Fatalf("got %v, want %v", opts.ToolsFolder, tc.want) } }) } @@ -455,12 +381,12 @@ func TestPrebuiltFlag(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - c, _, err := invokeCommand(tc.args) + _, opts, _, err := invokeCommand(tc.args) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" { - t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff) + if diff := cmp.Diff(opts.PrebuiltConfigs, tc.want); diff != "" { + t.Fatalf("got %v, want %v, diff %s", opts.PrebuiltConfigs, tc.want, diff) } }) } @@ -482,7 +408,7 @@ func TestFailServerConfigFlags(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - _, _, err := invokeCommand(tc.args) + _, _, _, err := invokeCommand(tc.args) if err == nil { t.Fatalf("expected an error, but got nil") } @@ -491,11 +417,11 @@ func TestFailServerConfigFlags(t *testing.T) { } func TestDefaultLoggingFormat(t *testing.T) { - c, _, err := invokeCommand([]string{}) + _, opts, _, err := invokeCommand([]string{}) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - got := c.cfg.LoggingFormat.String() + got := opts.Cfg.LoggingFormat.String() want := "standard" if got != want { t.Fatalf("unexpected default logging format flag: got %v, want %v", got, want) @@ -503,677 +429,17 @@ func TestDefaultLoggingFormat(t *testing.T) { } func TestDefaultLogLevel(t *testing.T) { - c, _, err := invokeCommand([]string{}) + _, opts, _, err := invokeCommand([]string{}) if err != nil { t.Fatalf("unexpected error invoking command: %s", err) } - got := c.cfg.LogLevel.String() + got := opts.Cfg.LogLevel.String() want := "info" if got != want { t.Fatalf("unexpected default log level flag: got %v, want %v", got, want) } } -func TestParseToolFile(t *testing.T) { - ctx, err := testutils.ContextWithNewLogger() - if err != nil { - t.Fatalf("unexpected error: %s", err) - } - tcs := []struct 
{ - description string - in string - wantToolsFile ToolsFile - }{ - { - description: "basic example", - in: ` - sources: - my-pg-instance: - kind: cloud-sql-postgres - project: my-project - region: my-region - instance: my-instance - database: my_db - user: my_user - password: my_pass - tools: - example_tool: - kind: postgres-sql - source: my-pg-instance - description: some description - statement: | - SELECT * FROM SQL_STATEMENT; - parameters: - - name: country - type: string - description: some description - toolsets: - example_toolset: - - example_tool - `, - wantToolsFile: ToolsFile{ - Sources: server.SourceConfigs{ - "my-pg-instance": cloudsqlpgsrc.Config{ - Name: "my-pg-instance", - Kind: cloudsqlpgsrc.SourceKind, - Project: "my-project", - Region: "my-region", - Instance: "my-instance", - IPType: "public", - Database: "my_db", - User: "my_user", - Password: "my_pass", - }, - }, - Tools: server.ToolConfigs{ - "example_tool": postgressql.Config{ - Name: "example_tool", - Kind: "postgres-sql", - Source: "my-pg-instance", - Description: "some description", - Statement: "SELECT * FROM SQL_STATEMENT;\n", - Parameters: []parameters.Parameter{ - parameters.NewStringParameter("country", "some description"), - }, - AuthRequired: []string{}, - }, - }, - Toolsets: server.ToolsetConfigs{ - "example_toolset": tools.ToolsetConfig{ - Name: "example_toolset", - ToolNames: []string{"example_tool"}, - }, - }, - Prompts: nil, - }, - }, - { - description: "with prompts example", - in: ` - prompts: - my-prompt: - description: A prompt template for data analysis. - arguments: - - name: country - description: The country to analyze. - messages: - - content: Analyze the data for {{.country}}. 
- `, - wantToolsFile: ToolsFile{ - Sources: nil, - AuthServices: nil, - Tools: nil, - Toolsets: nil, - Prompts: server.PromptConfigs{ - "my-prompt": &custom.Config{ - Name: "my-prompt", - Description: "A prompt template for data analysis.", - Arguments: prompts.Arguments{ - {Parameter: parameters.NewStringParameter("country", "The country to analyze.")}, - }, - Messages: []prompts.Message{ - {Role: "user", Content: "Analyze the data for {{.country}}."}, - }, - }, - }, - }, - }, - } - for _, tc := range tcs { - t.Run(tc.description, func(t *testing.T) { - toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) - if err != nil { - t.Fatalf("failed to parse input: %v", err) - } - if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { - t.Fatalf("incorrect sources parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { - t.Fatalf("incorrect authServices parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { - t.Fatalf("incorrect tools parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" { - t.Fatalf("incorrect toolsets parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" { - t.Fatalf("incorrect prompts parse: diff %v", diff) - } - }) - } - -} - -func TestParseToolFileWithAuth(t *testing.T) { - ctx, err := testutils.ContextWithNewLogger() - if err != nil { - t.Fatalf("unexpected error: %s", err) - } - tcs := []struct { - description string - in string - wantToolsFile ToolsFile - }{ - { - description: "basic example", - in: ` - sources: - my-pg-instance: - kind: cloud-sql-postgres - project: my-project - region: my-region - instance: my-instance - database: my_db - user: my_user - password: my_pass - authServices: - my-google-service: - kind: google - clientId: my-client-id - other-google-service: - kind: google - 
clientId: other-client-id - - tools: - example_tool: - kind: postgres-sql - source: my-pg-instance - description: some description - statement: | - SELECT * FROM SQL_STATEMENT; - parameters: - - name: country - type: string - description: some description - - name: id - type: integer - description: user id - authServices: - - name: my-google-service - field: user_id - - name: email - type: string - description: user email - authServices: - - name: my-google-service - field: email - - name: other-google-service - field: other_email - - toolsets: - example_toolset: - - example_tool - `, - wantToolsFile: ToolsFile{ - Sources: server.SourceConfigs{ - "my-pg-instance": cloudsqlpgsrc.Config{ - Name: "my-pg-instance", - Kind: cloudsqlpgsrc.SourceKind, - Project: "my-project", - Region: "my-region", - Instance: "my-instance", - IPType: "public", - Database: "my_db", - User: "my_user", - Password: "my_pass", - }, - }, - AuthServices: server.AuthServiceConfigs{ - "my-google-service": google.Config{ - Name: "my-google-service", - Kind: google.AuthServiceKind, - ClientID: "my-client-id", - }, - "other-google-service": google.Config{ - Name: "other-google-service", - Kind: google.AuthServiceKind, - ClientID: "other-client-id", - }, - }, - Tools: server.ToolConfigs{ - "example_tool": postgressql.Config{ - Name: "example_tool", - Kind: "postgres-sql", - Source: "my-pg-instance", - Description: "some description", - Statement: "SELECT * FROM SQL_STATEMENT;\n", - AuthRequired: []string{}, - Parameters: []parameters.Parameter{ - parameters.NewStringParameter("country", "some description"), - parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), - parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}), - }, - }, - }, - Toolsets: server.ToolsetConfigs{ - "example_toolset": 
tools.ToolsetConfig{ - Name: "example_toolset", - ToolNames: []string{"example_tool"}, - }, - }, - Prompts: nil, - }, - }, - { - description: "basic example with authSources", - in: ` - sources: - my-pg-instance: - kind: cloud-sql-postgres - project: my-project - region: my-region - instance: my-instance - database: my_db - user: my_user - password: my_pass - authSources: - my-google-service: - kind: google - clientId: my-client-id - other-google-service: - kind: google - clientId: other-client-id - - tools: - example_tool: - kind: postgres-sql - source: my-pg-instance - description: some description - statement: | - SELECT * FROM SQL_STATEMENT; - parameters: - - name: country - type: string - description: some description - - name: id - type: integer - description: user id - authSources: - - name: my-google-service - field: user_id - - name: email - type: string - description: user email - authSources: - - name: my-google-service - field: email - - name: other-google-service - field: other_email - - toolsets: - example_toolset: - - example_tool - `, - wantToolsFile: ToolsFile{ - Sources: server.SourceConfigs{ - "my-pg-instance": cloudsqlpgsrc.Config{ - Name: "my-pg-instance", - Kind: cloudsqlpgsrc.SourceKind, - Project: "my-project", - Region: "my-region", - Instance: "my-instance", - IPType: "public", - Database: "my_db", - User: "my_user", - Password: "my_pass", - }, - }, - AuthSources: server.AuthServiceConfigs{ - "my-google-service": google.Config{ - Name: "my-google-service", - Kind: google.AuthServiceKind, - ClientID: "my-client-id", - }, - "other-google-service": google.Config{ - Name: "other-google-service", - Kind: google.AuthServiceKind, - ClientID: "other-client-id", - }, - }, - Tools: server.ToolConfigs{ - "example_tool": postgressql.Config{ - Name: "example_tool", - Kind: "postgres-sql", - Source: "my-pg-instance", - Description: "some description", - Statement: "SELECT * FROM SQL_STATEMENT;\n", - AuthRequired: []string{}, - Parameters: 
[]parameters.Parameter{ - parameters.NewStringParameter("country", "some description"), - parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), - parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}), - }, - }, - }, - Toolsets: server.ToolsetConfigs{ - "example_toolset": tools.ToolsetConfig{ - Name: "example_toolset", - ToolNames: []string{"example_tool"}, - }, - }, - Prompts: nil, - }, - }, - { - description: "basic example with authRequired", - in: ` - sources: - my-pg-instance: - kind: cloud-sql-postgres - project: my-project - region: my-region - instance: my-instance - database: my_db - user: my_user - password: my_pass - authServices: - my-google-service: - kind: google - clientId: my-client-id - other-google-service: - kind: google - clientId: other-client-id - - tools: - example_tool: - kind: postgres-sql - source: my-pg-instance - description: some description - statement: | - SELECT * FROM SQL_STATEMENT; - authRequired: - - my-google-service - parameters: - - name: country - type: string - description: some description - - name: id - type: integer - description: user id - authServices: - - name: my-google-service - field: user_id - - name: email - type: string - description: user email - authServices: - - name: my-google-service - field: email - - name: other-google-service - field: other_email - - toolsets: - example_toolset: - - example_tool - `, - wantToolsFile: ToolsFile{ - Sources: server.SourceConfigs{ - "my-pg-instance": cloudsqlpgsrc.Config{ - Name: "my-pg-instance", - Kind: cloudsqlpgsrc.SourceKind, - Project: "my-project", - Region: "my-region", - Instance: "my-instance", - IPType: "public", - Database: "my_db", - User: "my_user", - Password: "my_pass", - }, - }, - AuthServices: server.AuthServiceConfigs{ - "my-google-service": 
google.Config{ - Name: "my-google-service", - Kind: google.AuthServiceKind, - ClientID: "my-client-id", - }, - "other-google-service": google.Config{ - Name: "other-google-service", - Kind: google.AuthServiceKind, - ClientID: "other-client-id", - }, - }, - Tools: server.ToolConfigs{ - "example_tool": postgressql.Config{ - Name: "example_tool", - Kind: "postgres-sql", - Source: "my-pg-instance", - Description: "some description", - Statement: "SELECT * FROM SQL_STATEMENT;\n", - AuthRequired: []string{"my-google-service"}, - Parameters: []parameters.Parameter{ - parameters.NewStringParameter("country", "some description"), - parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}), - parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}), - }, - }, - }, - Toolsets: server.ToolsetConfigs{ - "example_toolset": tools.ToolsetConfig{ - Name: "example_toolset", - ToolNames: []string{"example_tool"}, - }, - }, - Prompts: nil, - }, - }, - } - for _, tc := range tcs { - t.Run(tc.description, func(t *testing.T) { - toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) - if err != nil { - t.Fatalf("failed to parse input: %v", err) - } - if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { - t.Fatalf("incorrect sources parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { - t.Fatalf("incorrect authServices parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { - t.Fatalf("incorrect tools parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" { - t.Fatalf("incorrect toolsets parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff 
!= "" { - t.Fatalf("incorrect prompts parse: diff %v", diff) - } - }) - } - -} - -func TestEnvVarReplacement(t *testing.T) { - ctx, err := testutils.ContextWithNewLogger() - t.Setenv("TestHeader", "ACTUAL_HEADER") - t.Setenv("API_KEY", "ACTUAL_API_KEY") - t.Setenv("clientId", "ACTUAL_CLIENT_ID") - t.Setenv("clientId2", "ACTUAL_CLIENT_ID_2") - t.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME") - t.Setenv("cat_string", "cat") - t.Setenv("food_string", "food") - t.Setenv("TestHeader", "ACTUAL_HEADER") - t.Setenv("prompt_name", "ACTUAL_PROMPT_NAME") - t.Setenv("prompt_content", "ACTUAL_CONTENT") - - if err != nil { - t.Fatalf("unexpected error: %s", err) - } - tcs := []struct { - description string - in string - wantToolsFile ToolsFile - }{ - { - description: "file with env var example", - in: ` - sources: - my-http-instance: - kind: http - baseUrl: http://test_server/ - timeout: 10s - headers: - Authorization: ${TestHeader} - queryParams: - api-key: ${API_KEY} - authServices: - my-google-service: - kind: google - clientId: ${clientId} - other-google-service: - kind: google - clientId: ${clientId2} - - tools: - example_tool: - kind: http - source: my-instance - method: GET - path: "search?name=alice&pet=${cat_string}" - description: some description - authRequired: - - my-google-auth-service - - other-auth-service - queryParams: - - name: country - type: string - description: some description - authServices: - - name: my-google-auth-service - field: user_id - - name: other-auth-service - field: user_id - requestBody: | - { - "age": {{.age}}, - "city": "{{.city}}", - "food": "${food_string}", - "other": "$OTHER" - } - bodyParams: - - name: age - type: integer - description: age num - - name: city - type: string - description: city string - headers: - Authorization: API_KEY - Content-Type: application/json - headerParams: - - name: Language - type: string - description: language string - - toolsets: - ${toolset_name}: - - example_tool - - - prompts: - ${prompt_name}: - 
description: A test prompt for {{.name}}. - messages: - - role: user - content: ${prompt_content} - `, - wantToolsFile: ToolsFile{ - Sources: server.SourceConfigs{ - "my-http-instance": httpsrc.Config{ - Name: "my-http-instance", - Kind: httpsrc.SourceKind, - BaseURL: "http://test_server/", - Timeout: "10s", - DefaultHeaders: map[string]string{"Authorization": "ACTUAL_HEADER"}, - QueryParams: map[string]string{"api-key": "ACTUAL_API_KEY"}, - }, - }, - AuthServices: server.AuthServiceConfigs{ - "my-google-service": google.Config{ - Name: "my-google-service", - Kind: google.AuthServiceKind, - ClientID: "ACTUAL_CLIENT_ID", - }, - "other-google-service": google.Config{ - Name: "other-google-service", - Kind: google.AuthServiceKind, - ClientID: "ACTUAL_CLIENT_ID_2", - }, - }, - Tools: server.ToolConfigs{ - "example_tool": http.Config{ - Name: "example_tool", - Kind: "http", - Source: "my-instance", - Method: "GET", - Path: "search?name=alice&pet=cat", - Description: "some description", - AuthRequired: []string{"my-google-auth-service", "other-auth-service"}, - QueryParams: []parameters.Parameter{ - parameters.NewStringParameterWithAuth("country", "some description", - []parameters.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"}, - {Name: "other-auth-service", Field: "user_id"}}), - }, - RequestBody: `{ - "age": {{.age}}, - "city": "{{.city}}", - "food": "food", - "other": "$OTHER" -} -`, - BodyParams: []parameters.Parameter{parameters.NewIntParameter("age", "age num"), parameters.NewStringParameter("city", "city string")}, - Headers: map[string]string{"Authorization": "API_KEY", "Content-Type": "application/json"}, - HeaderParams: []parameters.Parameter{parameters.NewStringParameter("Language", "language string")}, - }, - }, - Toolsets: server.ToolsetConfigs{ - "ACTUAL_TOOLSET_NAME": tools.ToolsetConfig{ - Name: "ACTUAL_TOOLSET_NAME", - ToolNames: []string{"example_tool"}, - }, - }, - Prompts: server.PromptConfigs{ - "ACTUAL_PROMPT_NAME": 
&custom.Config{ - Name: "ACTUAL_PROMPT_NAME", - Description: "A test prompt for {{.name}}.", - Messages: []prompts.Message{ - { - Role: "user", - Content: "ACTUAL_CONTENT", - }, - }, - Arguments: nil, - }, - }, - }, - }, - } - for _, tc := range tcs { - t.Run(tc.description, func(t *testing.T) { - toolsFile, err := parseToolsFile(ctx, testutils.FormatYaml(tc.in)) - if err != nil { - t.Fatalf("failed to parse input: %v", err) - } - if diff := cmp.Diff(tc.wantToolsFile.Sources, toolsFile.Sources); diff != "" { - t.Fatalf("incorrect sources parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.AuthServices, toolsFile.AuthServices); diff != "" { - t.Fatalf("incorrect authServices parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Tools, toolsFile.Tools); diff != "" { - t.Fatalf("incorrect tools parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" { - t.Fatalf("incorrect toolsets parse: diff %v", diff) - } - if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" { - t.Fatalf("incorrect prompts parse: diff %v", diff) - } - }) - } -} - // normalizeFilepaths is a helper function to allow same filepath formats for Mac and Windows. 
// this prevents needing multiple "want" cases for TestResolveWatcherInputs func normalizeFilepaths(m map[string]bool) map[string]bool { @@ -1352,468 +618,6 @@ func TestSingleEdit(t *testing.T) { } } -func TestPrebuiltTools(t *testing.T) { - // Get prebuilt configs - alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin") - alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres") - bigquery_config, _ := prebuiltconfigs.Get("bigquery") - clickhouse_config, _ := prebuiltconfigs.Get("clickhouse") - cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres") - cloudsqlpg_admin_config, _ := prebuiltconfigs.Get("cloud-sql-postgres-admin") - cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql") - cloudsqlmysql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mysql-admin") - cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql") - cloudsqlmssql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mssql-admin") - dataplex_config, _ := prebuiltconfigs.Get("dataplex") - firestoreconfig, _ := prebuiltconfigs.Get("firestore") - mysql_config, _ := prebuiltconfigs.Get("mysql") - mssql_config, _ := prebuiltconfigs.Get("mssql") - looker_config, _ := prebuiltconfigs.Get("looker") - lookerca_config, _ := prebuiltconfigs.Get("looker-conversational-analytics") - postgresconfig, _ := prebuiltconfigs.Get("postgres") - spanner_config, _ := prebuiltconfigs.Get("spanner") - spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres") - mindsdb_config, _ := prebuiltconfigs.Get("mindsdb") - sqlite_config, _ := prebuiltconfigs.Get("sqlite") - neo4jconfig, _ := prebuiltconfigs.Get("neo4j") - alloydbobsvconfig, _ := prebuiltconfigs.Get("alloydb-postgres-observability") - cloudsqlpgobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-postgres-observability") - cloudsqlmysqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mysql-observability") - cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability") - serverless_spark_config, 
_ := prebuiltconfigs.Get("serverless-spark") - cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare") - snowflake_config, _ := prebuiltconfigs.Get("snowflake") - - // Set environment variables - t.Setenv("API_KEY", "your_api_key") - - t.Setenv("BIGQUERY_PROJECT", "your_gcp_project_id") - t.Setenv("DATAPLEX_PROJECT", "your_gcp_project_id") - t.Setenv("FIRESTORE_PROJECT", "your_gcp_project_id") - t.Setenv("FIRESTORE_DATABASE", "your_firestore_db_name") - - t.Setenv("SPANNER_PROJECT", "your_gcp_project_id") - t.Setenv("SPANNER_INSTANCE", "your_spanner_instance") - t.Setenv("SPANNER_DATABASE", "your_spanner_db") - - t.Setenv("ALLOYDB_POSTGRES_PROJECT", "your_gcp_project_id") - t.Setenv("ALLOYDB_POSTGRES_REGION", "your_gcp_region") - t.Setenv("ALLOYDB_POSTGRES_CLUSTER", "your_alloydb_cluster") - t.Setenv("ALLOYDB_POSTGRES_INSTANCE", "your_alloydb_instance") - t.Setenv("ALLOYDB_POSTGRES_DATABASE", "your_alloydb_db") - t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user") - t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password") - - t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol") - t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database") - t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password") - t.Setenv("CLICKHOUSE_USER", "your_clickhouse_user") - t.Setenv("CLICKHOUSE_HOST", "your_clickhosue_host") - t.Setenv("CLICKHOUSE_PORT", "8123") - - t.Setenv("CLOUD_SQL_POSTGRES_PROJECT", "your_pg_project") - t.Setenv("CLOUD_SQL_POSTGRES_INSTANCE", "your_pg_instance") - t.Setenv("CLOUD_SQL_POSTGRES_DATABASE", "your_pg_db") - t.Setenv("CLOUD_SQL_POSTGRES_REGION", "your_pg_region") - t.Setenv("CLOUD_SQL_POSTGRES_USER", "your_pg_user") - t.Setenv("CLOUD_SQL_POSTGRES_PASS", "your_pg_pass") - - t.Setenv("CLOUD_SQL_MYSQL_PROJECT", "your_gcp_project_id") - t.Setenv("CLOUD_SQL_MYSQL_REGION", "your_gcp_region") - t.Setenv("CLOUD_SQL_MYSQL_INSTANCE", "your_instance") - t.Setenv("CLOUD_SQL_MYSQL_DATABASE", "your_cloudsql_mysql_db") - 
t.Setenv("CLOUD_SQL_MYSQL_USER", "your_cloudsql_mysql_user") - t.Setenv("CLOUD_SQL_MYSQL_PASSWORD", "your_cloudsql_mysql_password") - - t.Setenv("CLOUD_SQL_MSSQL_PROJECT", "your_gcp_project_id") - t.Setenv("CLOUD_SQL_MSSQL_REGION", "your_gcp_region") - t.Setenv("CLOUD_SQL_MSSQL_INSTANCE", "your_cloudsql_mssql_instance") - t.Setenv("CLOUD_SQL_MSSQL_DATABASE", "your_cloudsql_mssql_db") - t.Setenv("CLOUD_SQL_MSSQL_IP_ADDRESS", "127.0.0.1") - t.Setenv("CLOUD_SQL_MSSQL_USER", "your_cloudsql_mssql_user") - t.Setenv("CLOUD_SQL_MSSQL_PASSWORD", "your_cloudsql_mssql_password") - t.Setenv("CLOUD_SQL_POSTGRES_PASSWORD", "your_cloudsql_pg_password") - - t.Setenv("SERVERLESS_SPARK_PROJECT", "your_gcp_project_id") - t.Setenv("SERVERLESS_SPARK_LOCATION", "your_gcp_location") - - t.Setenv("POSTGRES_HOST", "localhost") - t.Setenv("POSTGRES_PORT", "5432") - t.Setenv("POSTGRES_DATABASE", "your_postgres_db") - t.Setenv("POSTGRES_USER", "your_postgres_user") - t.Setenv("POSTGRES_PASSWORD", "your_postgres_password") - - t.Setenv("MYSQL_HOST", "localhost") - t.Setenv("MYSQL_PORT", "3306") - t.Setenv("MYSQL_DATABASE", "your_mysql_db") - t.Setenv("MYSQL_USER", "your_mysql_user") - t.Setenv("MYSQL_PASSWORD", "your_mysql_password") - - t.Setenv("MSSQL_HOST", "localhost") - t.Setenv("MSSQL_PORT", "1433") - t.Setenv("MSSQL_DATABASE", "your_mssql_db") - t.Setenv("MSSQL_USER", "your_mssql_user") - t.Setenv("MSSQL_PASSWORD", "your_mssql_password") - - t.Setenv("MINDSDB_HOST", "localhost") - t.Setenv("MINDSDB_PORT", "47334") - t.Setenv("MINDSDB_DATABASE", "your_mindsdb_db") - t.Setenv("MINDSDB_USER", "your_mindsdb_user") - t.Setenv("MINDSDB_PASS", "your_mindsdb_password") - - t.Setenv("LOOKER_BASE_URL", "https://your_company.looker.com") - t.Setenv("LOOKER_CLIENT_ID", "your_looker_client_id") - t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret") - t.Setenv("LOOKER_VERIFY_SSL", "true") - - t.Setenv("LOOKER_PROJECT", "your_project_id") - t.Setenv("LOOKER_LOCATION", "us") - - 
t.Setenv("SQLITE_DATABASE", "test.db") - - t.Setenv("NEO4J_URI", "bolt://localhost:7687") - t.Setenv("NEO4J_DATABASE", "neo4j") - t.Setenv("NEO4J_USERNAME", "your_neo4j_user") - t.Setenv("NEO4J_PASSWORD", "your_neo4j_password") - - t.Setenv("CLOUD_HEALTHCARE_PROJECT", "your_gcp_project_id") - t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region") - t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset") - - t.Setenv("SNOWFLAKE_ACCOUNT", "your_account") - t.Setenv("SNOWFLAKE_USER", "your_username") - t.Setenv("SNOWFLAKE_PASSWORD", "your_pass") - t.Setenv("SNOWFLAKE_DATABASE", "your_db") - t.Setenv("SNOWFLAKE_SCHEMA", "your_schema") - t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh") - t.Setenv("SNOWFLAKE_ROLE", "your_role") - - ctx, err := testutils.ContextWithNewLogger() - if err != nil { - t.Fatalf("unexpected error: %s", err) - } - tcs := []struct { - name string - in []byte - wantToolset server.ToolsetConfigs - }{ - { - name: "alloydb postgres admin prebuilt tools", - in: alloydb_admin_config, - wantToolset: server.ToolsetConfigs{ - "alloydb_postgres_admin_tools": tools.ToolsetConfig{ - Name: "alloydb_postgres_admin_tools", - ToolNames: []string{"create_cluster", "wait_for_operation", "create_instance", "list_clusters", "list_instances", "list_users", "create_user", "get_cluster", "get_instance", "get_user"}, - }, - }, - }, - { - name: "cloudsql pg admin prebuilt tools", - in: cloudsqlpg_admin_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_postgres_admin_tools": tools.ToolsetConfig{ - Name: "cloud_sql_postgres_admin_tools", - ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"}, - }, - }, - }, - { - name: "cloudsql mysql admin prebuilt tools", - in: cloudsqlmysql_admin_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mysql_admin_tools": tools.ToolsetConfig{ - Name: 
"cloud_sql_mysql_admin_tools", - ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"}, - }, - }, - }, - { - name: "cloudsql mssql admin prebuilt tools", - in: cloudsqlmssql_admin_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mssql_admin_tools": tools.ToolsetConfig{ - Name: "cloud_sql_mssql_admin_tools", - ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"}, - }, - }, - }, - { - name: "alloydb prebuilt tools", - in: alloydb_config, - wantToolset: server.ToolsetConfigs{ - "alloydb_postgres_database_tools": tools.ToolsetConfig{ - Name: "alloydb_postgres_database_tools", - ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, - }, - }, - }, - { - name: "bigquery prebuilt tools", - in: bigquery_config, - wantToolset: server.ToolsetConfigs{ - "bigquery_database_tools": tools.ToolsetConfig{ - Name: "bigquery_database_tools", - ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids", "search_catalog"}, - }, - }, - }, - { - name: "clickhouse prebuilt tools", - in: 
clickhouse_config, - wantToolset: server.ToolsetConfigs{ - "clickhouse_database_tools": tools.ToolsetConfig{ - Name: "clickhouse_database_tools", - ToolNames: []string{"execute_sql", "list_databases", "list_tables"}, - }, - }, - }, - { - name: "cloudsqlpg prebuilt tools", - in: cloudsqlpg_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_postgres_database_tools": tools.ToolsetConfig{ - Name: "cloud_sql_postgres_database_tools", - ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, - }, - }, - }, - { - name: "cloudsqlmysql prebuilt tools", - in: cloudsqlmysql_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mysql_database_tools": tools.ToolsetConfig{ - Name: "cloud_sql_mysql_database_tools", - ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"}, - }, - }, - }, - { - name: "cloudsqlmssql prebuilt tools", - in: cloudsqlmssql_config, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mssql_database_tools": tools.ToolsetConfig{ - Name: "cloud_sql_mssql_database_tools", - ToolNames: []string{"execute_sql", "list_tables"}, - }, - }, - }, - { - name: "dataplex prebuilt tools", - in: dataplex_config, - wantToolset: server.ToolsetConfigs{ - "dataplex_tools": tools.ToolsetConfig{ - Name: "dataplex_tools", - ToolNames: []string{"search_entries", "lookup_entry", 
"search_aspect_types"}, - }, - }, - }, - { - name: "serverless spark prebuilt tools", - in: serverless_spark_config, - wantToolset: server.ToolsetConfigs{ - "serverless_spark_tools": tools.ToolsetConfig{ - Name: "serverless_spark_tools", - ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"}, - }, - }, - }, - { - name: "firestore prebuilt tools", - in: firestoreconfig, - wantToolset: server.ToolsetConfigs{ - "firestore_database_tools": tools.ToolsetConfig{ - Name: "firestore_database_tools", - ToolNames: []string{"get_documents", "add_documents", "update_document", "list_collections", "delete_documents", "query_collection", "get_rules", "validate_rules"}, - }, - }, - }, - { - name: "mysql prebuilt tools", - in: mysql_config, - wantToolset: server.ToolsetConfigs{ - "mysql_database_tools": tools.ToolsetConfig{ - Name: "mysql_database_tools", - ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"}, - }, - }, - }, - { - name: "mssql prebuilt tools", - in: mssql_config, - wantToolset: server.ToolsetConfigs{ - "mssql_database_tools": tools.ToolsetConfig{ - Name: "mssql_database_tools", - ToolNames: []string{"execute_sql", "list_tables"}, - }, - }, - }, - { - name: "looker prebuilt tools", - in: looker_config, - wantToolset: server.ToolsetConfigs{ - "looker_tools": tools.ToolsetConfig{ - Name: "looker_tools", - ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", 
"get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"}, - }, - }, - }, - { - name: "looker-conversational-analytics prebuilt tools", - in: lookerca_config, - wantToolset: server.ToolsetConfigs{ - "looker_conversational_analytics_tools": tools.ToolsetConfig{ - Name: "looker_conversational_analytics_tools", - ToolNames: []string{"ask_data_insights", "get_models", "get_explores"}, - }, - }, - }, - { - name: "postgres prebuilt tools", - in: postgresconfig, - wantToolset: server.ToolsetConfigs{ - "postgres_database_tools": tools.ToolsetConfig{ - Name: "postgres_database_tools", - ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"}, - }, - }, - }, - { - name: "spanner prebuilt tools", - in: spanner_config, - wantToolset: server.ToolsetConfigs{ - "spanner-database-tools": tools.ToolsetConfig{ - Name: "spanner-database-tools", - ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables", "list_graphs"}, - }, - }, - }, - { - name: "spanner pg prebuilt tools", - in: spannerpg_config, - wantToolset: server.ToolsetConfigs{ - "spanner_postgres_database_tools": tools.ToolsetConfig{ - Name: "spanner_postgres_database_tools", - ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"}, - }, - }, - }, - { - name: "mindsdb prebuilt tools", - in: mindsdb_config, - wantToolset: server.ToolsetConfigs{ - 
"mindsdb-tools": tools.ToolsetConfig{ - Name: "mindsdb-tools", - ToolNames: []string{"mindsdb-execute-sql", "mindsdb-sql"}, - }, - }, - }, - { - name: "sqlite prebuilt tools", - in: sqlite_config, - wantToolset: server.ToolsetConfigs{ - "sqlite_database_tools": tools.ToolsetConfig{ - Name: "sqlite_database_tools", - ToolNames: []string{"execute_sql", "list_tables"}, - }, - }, - }, - { - name: "neo4j prebuilt tools", - in: neo4jconfig, - wantToolset: server.ToolsetConfigs{ - "neo4j_database_tools": tools.ToolsetConfig{ - Name: "neo4j_database_tools", - ToolNames: []string{"execute_cypher", "get_schema"}, - }, - }, - }, - { - name: "alloydb postgres observability prebuilt tools", - in: alloydbobsvconfig, - wantToolset: server.ToolsetConfigs{ - "alloydb_postgres_cloud_monitoring_tools": tools.ToolsetConfig{ - Name: "alloydb_postgres_cloud_monitoring_tools", - ToolNames: []string{"get_system_metrics", "get_query_metrics"}, - }, - }, - }, - { - name: "cloudsql postgres observability prebuilt tools", - in: cloudsqlpgobsvconfig, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_postgres_cloud_monitoring_tools": tools.ToolsetConfig{ - Name: "cloud_sql_postgres_cloud_monitoring_tools", - ToolNames: []string{"get_system_metrics", "get_query_metrics"}, - }, - }, - }, - { - name: "cloudsql mysql observability prebuilt tools", - in: cloudsqlmysqlobsvconfig, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mysql_cloud_monitoring_tools": tools.ToolsetConfig{ - Name: "cloud_sql_mysql_cloud_monitoring_tools", - ToolNames: []string{"get_system_metrics", "get_query_metrics"}, - }, - }, - }, - { - name: "cloudsql mssql observability prebuilt tools", - in: cloudsqlmssqlobsvconfig, - wantToolset: server.ToolsetConfigs{ - "cloud_sql_mssql_cloud_monitoring_tools": tools.ToolsetConfig{ - Name: "cloud_sql_mssql_cloud_monitoring_tools", - ToolNames: []string{"get_system_metrics"}, - }, - }, - }, - { - name: "cloud healthcare prebuilt tools", - in: cloudhealthcare_config, - wantToolset: 
server.ToolsetConfigs{ - "cloud_healthcare_dataset_tools": tools.ToolsetConfig{ - Name: "cloud_healthcare_dataset_tools", - ToolNames: []string{"get_dataset", "list_dicom_stores", "list_fhir_stores"}, - }, - "cloud_healthcare_fhir_tools": tools.ToolsetConfig{ - Name: "cloud_healthcare_fhir_tools", - ToolNames: []string{"get_fhir_store", "get_fhir_store_metrics", "get_fhir_resource", "fhir_patient_search", "fhir_patient_everything", "fhir_fetch_page"}, - }, - "cloud_healthcare_dicom_tools": tools.ToolsetConfig{ - Name: "cloud_healthcare_dicom_tools", - ToolNames: []string{"get_dicom_store", "get_dicom_store_metrics", "search_dicom_studies", "search_dicom_series", "search_dicom_instances", "retrieve_rendered_dicom_instance"}, - }, - }, - }, - { - name: "Snowflake prebuilt tool", - in: snowflake_config, - wantToolset: server.ToolsetConfigs{ - "snowflake_tools": tools.ToolsetConfig{ - Name: "snowflake_tools", - ToolNames: []string{"execute_sql", "list_tables"}, - }, - }, - }, - } - - for _, tc := range tcs { - t.Run(tc.name, func(t *testing.T) { - toolsFile, err := parseToolsFile(ctx, tc.in) - if err != nil { - t.Fatalf("failed to parse input: %v", err) - } - if diff := cmp.Diff(tc.wantToolset, toolsFile.Toolsets); diff != "" { - t.Fatalf("incorrect tools parse: diff %v", diff) - } - // Prebuilt configs do not have prompts, so assert empty maps. 
- if len(toolsFile.Prompts) != 0 { - t.Fatalf("expected empty prompts map for prebuilt config, got: %v", toolsFile.Prompts) - } - }) - } -} - func TestMutuallyExclusiveFlags(t *testing.T) { testCases := []struct { desc string @@ -1834,7 +638,9 @@ func TestMutuallyExclusiveFlags(t *testing.T) { for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { - cmd := NewCommand() + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + cmd := NewCommand(opts) cmd.SetArgs(tc.args) err := cmd.Execute() if err == nil { @@ -1849,7 +655,9 @@ func TestMutuallyExclusiveFlags(t *testing.T) { func TestFileLoadingErrors(t *testing.T) { t.Run("non-existent tools-file", func(t *testing.T) { - cmd := NewCommand() + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + cmd := NewCommand(opts) // Use a file that is guaranteed not to exist nonExistentFile := filepath.Join(t.TempDir(), "non-existent-tools.yaml") cmd.SetArgs([]string{"--tools-file", nonExistentFile}) @@ -1864,7 +672,9 @@ func TestFileLoadingErrors(t *testing.T) { }) t.Run("non-existent tools-folder", func(t *testing.T) { - cmd := NewCommand() + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + cmd := NewCommand(opts) nonExistentFolder := filepath.Join(t.TempDir(), "non-existent-folder") cmd.SetArgs([]string{"--tools-folder", nonExistentFolder}) @@ -1878,109 +688,22 @@ func TestFileLoadingErrors(t *testing.T) { }) } -func TestMergeToolsFiles(t *testing.T) { - file1 := ToolsFile{ - Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, - Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}}, - Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}}, - EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, - } - file2 := ToolsFile{ - AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: 
"auth1"}}, - Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}}, - Toolsets: server.ToolsetConfigs{"set2": tools.ToolsetConfig{Name: "set2"}}, - } - fileWithConflicts := ToolsFile{ - Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, - Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}}, - } - - testCases := []struct { - name string - files []ToolsFile - want ToolsFile - wantErr bool - }{ - { - name: "merge two distinct files", - files: []ToolsFile{file1, file2}, - want: ToolsFile{ - Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}}, - AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}}, - Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}}, - Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}}, - Prompts: server.PromptConfigs{}, - EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, - }, - wantErr: false, - }, - { - name: "merge with conflicts", - files: []ToolsFile{file1, file2, fileWithConflicts}, - wantErr: true, - }, - { - name: "merge single file", - files: []ToolsFile{file1}, - want: ToolsFile{ - Sources: file1.Sources, - AuthServices: make(server.AuthServiceConfigs), - EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}}, - Tools: file1.Tools, - Toolsets: file1.Toolsets, - Prompts: server.PromptConfigs{}, - }, - }, - { - name: "merge empty list", - files: []ToolsFile{}, - want: ToolsFile{ - Sources: make(server.SourceConfigs), - AuthServices: make(server.AuthServiceConfigs), - EmbeddingModels: make(server.EmbeddingModelConfigs), - Tools: make(server.ToolConfigs), - Toolsets: make(server.ToolsetConfigs), - Prompts: server.PromptConfigs{}, - }, - }, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - got, err := mergeToolsFiles(tc.files...) 
- if (err != nil) != tc.wantErr { - t.Fatalf("mergeToolsFiles() error = %v, wantErr %v", err, tc.wantErr) - } - if !tc.wantErr { - if diff := cmp.Diff(tc.want, got); diff != "" { - t.Errorf("mergeToolsFiles() mismatch (-want +got):\n%s", diff) - } - } else { - if err == nil { - t.Fatal("expected an error for conflicting files but got none") - } - if !strings.Contains(err.Error(), "resource conflicts detected") { - t.Errorf("expected conflict error, but got: %v", err) - } - } - }) - } -} func TestPrebuiltAndCustomTools(t *testing.T) { t.Setenv("SQLITE_DATABASE", "test.db") // Setup custom tools file customContent := ` -tools: - custom_tool: - kind: http - source: my-http - method: GET - path: / - description: "A custom tool for testing" -sources: - my-http: - kind: http - baseUrl: http://example.com +kind: tools +name: custom_tool +type: http +source: my-http +method: GET +path: / +description: "A custom tool for testing" +--- +kind: sources +name: my-http +type: http +baseUrl: http://example.com ` customFile := filepath.Join(t.TempDir(), "custom.yaml") if err := os.WriteFile(customFile, []byte(customContent), 0644); err != nil { @@ -1990,17 +713,18 @@ sources: // Tool Conflict File // SQLite prebuilt has a tool named 'list_tables' toolConflictContent := ` -tools: - list_tables: - kind: http - source: my-http - method: GET - path: / - description: "Conflicting tool" -sources: - my-http: - kind: http - baseUrl: http://example.com +kind: tools +name: list_tables +type: http +source: my-http +method: GET +path: / +description: "Conflicting tool" +--- +kind: sources +name: my-http +type: http +baseUrl: http://example.com ` toolConflictFile := filepath.Join(t.TempDir(), "tool_conflict.yaml") if err := os.WriteFile(toolConflictFile, []byte(toolConflictContent), 0644); err != nil { @@ -2010,17 +734,18 @@ sources: // Source Conflict File // SQLite prebuilt has a source named 'sqlite-source' sourceConflictContent := ` -sources: - sqlite-source: - kind: http - baseUrl: 
http://example.com -tools: - dummy_tool: - kind: http - source: sqlite-source - method: GET - path: / - description: "Dummy" +kind: sources +name: sqlite-source +type: http +baseUrl: http://example.com +--- +kind: tools +name: dummy_tool +type: http +source: sqlite-source +method: GET +path: / +description: "Dummy" ` sourceConflictFile := filepath.Join(t.TempDir(), "source_conflict.yaml") if err := os.WriteFile(sourceConflictFile, []byte(sourceConflictContent), 0644); err != nil { @@ -2030,20 +755,23 @@ tools: // Toolset Conflict File // SQLite prebuilt has a toolset named 'sqlite_database_tools' toolsetConflictContent := ` -sources: - dummy-src: - kind: http - baseUrl: http://example.com +kind: sources +name: dummy-src +type: http +baseUrl: http://example.com +--- +kind: tools +name: dummy_tool +type: http +source: dummy-src +method: GET +path: / +description: "Dummy" +--- +kind: toolsets +name: sqlite_database_tools tools: - dummy_tool: - kind: http - source: dummy-src - method: GET - path: / - description: "Dummy" -toolsets: - sqlite_database_tools: - - dummy_tool +- dummy_tool ` toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml") if err := os.WriteFile(toolsetConflictFile, []byte(toolsetConflictContent), 0644); err != nil { @@ -2125,7 +853,7 @@ authSources: ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) defer cancel() - cmd, output, err := invokeCommandWithContext(ctx, tc.args) + _, opts, output, err := invokeCommandWithContext(ctx, tc.args) if tc.wantErr { if err == nil { @@ -2142,7 +870,7 @@ authSources: t.Errorf("server did not start successfully (no ready message found). 
Output:\n%s", output) } if tc.cfgCheck != nil { - if err := tc.cfgCheck(cmd.cfg); err != nil { + if err := tc.cfgCheck(opts.Cfg); err != nil { t.Errorf("config check failed: %v", err) } } @@ -2176,7 +904,7 @@ func TestDefaultToolsFileBehavior(t *testing.T) { t.Run(tc.desc, func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) defer cancel() - _, output, err := invokeCommandWithContext(ctx, tc.args) + _, _, output, err := invokeCommandWithContext(ctx, tc.args) if tc.expectRun { if err != nil && err != context.DeadlineExceeded && err != context.Canceled { @@ -2198,114 +926,29 @@ func TestDefaultToolsFileBehavior(t *testing.T) { } } -func TestParameterReferenceValidation(t *testing.T) { - ctx, err := testutils.ContextWithNewLogger() - if err != nil { - t.Fatalf("unexpected error: %s", err) - } +func TestSubcommandWiring(t *testing.T) { + buf := new(bytes.Buffer) + opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf)) + baseCmd := NewCommand(opts) - // Base template - baseYaml := ` -sources: - dummy-source: - kind: http - baseUrl: http://example.com -tools: - test-tool: - kind: postgres-sql - source: dummy-source - description: test tool - statement: SELECT 1; - parameters: -%s` - - tcs := []struct { - desc string - params string - wantErr bool - errSubstr string + tests := []struct { + args []string + expectedName string }{ - { - desc: "valid backward reference", - params: ` - - name: source_param - type: string - description: source - - name: copy_param - type: string - description: copy - valueFromParam: source_param`, - wantErr: false, - }, - { - desc: "valid forward reference (out of order)", - params: ` - - name: copy_param - type: string - description: copy - valueFromParam: source_param - - name: source_param - type: string - description: source`, - wantErr: false, - }, - { - desc: "invalid missing reference", - params: ` - - name: copy_param - type: string - description: copy - valueFromParam: 
non_existent_param`, - wantErr: true, - errSubstr: "references '\"non_existent_param\"' in the 'valueFromParam' field", - }, - { - desc: "invalid self reference", - params: ` - - name: myself - type: string - description: self - valueFromParam: myself`, - wantErr: true, - errSubstr: "parameter \"myself\" cannot copy value from itself", - }, - { - desc: "multiple valid references", - params: ` - - name: a - type: string - description: a - - name: b - type: string - description: b - valueFromParam: a - - name: c - type: string - description: c - valueFromParam: a`, - wantErr: false, - }, + {[]string{"invoke"}, "invoke"}, + {[]string{"skills-generate"}, "skills-generate"}, } - for _, tc := range tcs { - t.Run(tc.desc, func(t *testing.T) { - // Indent parameters to match YAML structure - yamlContent := fmt.Sprintf(baseYaml, tc.params) + for _, tc := range tests { + // Find returns the Command struct and the remaining args + cmd, _, err := baseCmd.Find(tc.args) - _, err := parseToolsFile(ctx, []byte(yamlContent)) + if err != nil { + t.Fatalf("Failed to find command %v: %v", tc.args, err) + } - if tc.wantErr { - if err == nil { - t.Fatal("expected error, got nil") - } - if !strings.Contains(err.Error(), tc.errSubstr) { - t.Errorf("error %q does not contain expected substring %q", err.Error(), tc.errSubstr) - } - } else { - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - } - }) + if cmd.Name() != tc.expectedName { + t.Errorf("Expected command name %q, got %q", tc.expectedName, cmd.Name()) + } } } diff --git a/cmd/version.txt b/cmd/version.txt index 4e8f395fa5..1b58cc1018 100644 --- a/cmd/version.txt +++ b/cmd/version.txt @@ -1 +1 @@ -0.26.0 +0.27.0 diff --git a/docs/ALLOYDBADMIN_README.md b/docs/ALLOYDBADMIN_README.md index 489dfb0cea..bea3f66ef1 100644 --- a/docs/ALLOYDBADMIN_README.md +++ b/docs/ALLOYDBADMIN_README.md @@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t In the Antigravity MCP Store, click the 
"Install" button. +> [!NOTE] +> On first use, the installation process automatically downloads and uses +> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) +> `>=0.26.0`. To update MCP Toolbox, use: +> ```npm i -g @toolbox-sdk/server@latest``` +> To always run the latest version, update the MCP server configuration to use: +> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```. + You'll now be able to see all enabled tools in the "Tools" tab. > [!NOTE] diff --git a/docs/ALLOYDBPG_README.md b/docs/ALLOYDBPG_README.md index 2e03b785d3..881aa5b354 100644 --- a/docs/ALLOYDBPG_README.md +++ b/docs/ALLOYDBPG_README.md @@ -27,6 +27,13 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```. 2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. diff --git a/docs/BIGQUERY_README.md b/docs/BIGQUERY_README.md index dbc96564df..ad43da9b2f 100644 --- a/docs/BIGQUERY_README.md +++ b/docs/BIGQUERY_README.md @@ -21,6 +21,13 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. 
To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```. 2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. diff --git a/docs/CLOUDSQLMSSQLADMIN_README.md b/docs/CLOUDSQLMSSQLADMIN_README.md index 69130434aa..51829bde22 100644 --- a/docs/CLOUDSQLMSSQLADMIN_README.md +++ b/docs/CLOUDSQLMSSQLADMIN_README.md @@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t In the Antigravity MCP Store, click the "Install" button. +> [!NOTE] +> On first use, the installation process automatically downloads and uses +> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) +> `>=0.26.0`. To update MCP Toolbox, use: +> ```npm i -g @toolbox-sdk/server@latest``` +> To always run the latest version, update the MCP server configuration to use: +> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```. + You'll now be able to see all enabled tools in the "Tools" tab. > [!NOTE] diff --git a/docs/CLOUDSQLMSSQL_README.md b/docs/CLOUDSQLMSSQL_README.md index 9b1385f8b8..bcd2b12735 100644 --- a/docs/CLOUDSQLMSSQL_README.md +++ b/docs/CLOUDSQLMSSQL_README.md @@ -24,6 +24,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```. 2. 
Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. diff --git a/docs/CLOUDSQLMYSQLADMIN_README.md b/docs/CLOUDSQLMYSQLADMIN_README.md index 5ab9258a21..83122376b9 100644 --- a/docs/CLOUDSQLMYSQLADMIN_README.md +++ b/docs/CLOUDSQLMYSQLADMIN_README.md @@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t In the Antigravity MCP Store, click the "Install" button. +> [!NOTE] +> On first use, the installation process automatically downloads and uses +> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) +> `>=0.26.0`. To update MCP Toolbox, use: +> ```npm i -g @toolbox-sdk/server@latest``` +> To always run the latest version, update the MCP server configuration to use: +> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```. + You'll now be able to see all enabled tools in the "Tools" tab. > [!NOTE] diff --git a/docs/CLOUDSQLMYSQL_README.md b/docs/CLOUDSQLMYSQL_README.md index 4eb5ee1975..f7f4c1d8c1 100644 --- a/docs/CLOUDSQLMYSQL_README.md +++ b/docs/CLOUDSQLMYSQL_README.md @@ -26,6 +26,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```. 2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". 
You can update this configuration at any time in the "Configure" tab. diff --git a/docs/CLOUDSQLPGADMIN_README.md b/docs/CLOUDSQLPGADMIN_README.md index c1b594ea49..530348c805 100644 --- a/docs/CLOUDSQLPGADMIN_README.md +++ b/docs/CLOUDSQLPGADMIN_README.md @@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t In the Antigravity MCP Store, click the "Install" button. +> [!NOTE] +> On first use, the installation process automatically downloads and uses +> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) +> `>=0.26.0`. To update MCP Toolbox, use: +> ```npm i -g @toolbox-sdk/server@latest``` +> To always run the latest version, update the MCP server configuration to use: +> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```. + You'll now be able to see all enabled tools in the "Tools" tab. > [!NOTE] diff --git a/docs/CLOUDSQLPG_README.md b/docs/CLOUDSQLPG_README.md index a30bd002b8..eaeb5f8e5f 100644 --- a/docs/CLOUDSQLPG_README.md +++ b/docs/CLOUDSQLPG_README.md @@ -26,6 +26,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```. 2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. 
diff --git a/docs/DATAPLEX_README.md b/docs/DATAPLEX_README.md index f40f4eb909..cd593e4c38 100644 --- a/docs/DATAPLEX_README.md +++ b/docs/DATAPLEX_README.md @@ -20,6 +20,13 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```. 2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. diff --git a/docs/LOOKER_README.md b/docs/LOOKER_README.md index e1fc6bab24..5283c3d14d 100644 --- a/docs/LOOKER_README.md +++ b/docs/LOOKER_README.md @@ -21,6 +21,13 @@ An editor configured to use the Looker MCP server can use its AI capabilities to ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt looker```. 2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. 
diff --git a/docs/SPANNER_README.md b/docs/SPANNER_README.md index f4bf32cd19..646699bf84 100644 --- a/docs/SPANNER_README.md +++ b/docs/SPANNER_README.md @@ -21,6 +21,13 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili ## Install & Configuration 1. In the Antigravity MCP Store, click the "Install" button. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```. 2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab. diff --git a/docs/TOOLBOX_README.md b/docs/TOOLBOX_README.md index 170c71e183..3e7c0cd972 100644 --- a/docs/TOOLBOX_README.md +++ b/docs/TOOLBOX_README.md @@ -12,10 +12,17 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil ## Install & Configuration 1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear. + > [!NOTE] + > On first use, the installation process automatically downloads and uses + > [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server) + > `>=0.26.0`. To update MCP Toolbox, use: + > ```npm i -g @toolbox-sdk/server@latest``` + > To always run the latest version, update the MCP server configuration to use: + > ```npx -y @toolbox-sdk/server@latest```. -2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/). +3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/). -3. 
In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**. +4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**. > [!NOTE] > If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details. diff --git a/docs/en/about/faq.md b/docs/en/about/faq.md index 683c49c6b0..5ab557e099 100644 --- a/docs/en/about/faq.md +++ b/docs/en/about/faq.md @@ -45,7 +45,7 @@ most popular issues, so make sure to +1 ones you are the most interested in. ## Can Toolbox be used for non-database tools? **Yes!** While Toolbox is primarily focused on databases, it also supports generic -**HTTP tools** (`kind: http`). These allow you to connect your agents to REST APIs +**HTTP tools** (`type: http`). These allow you to connect your agents to REST APIs and other web services, enabling workflows that extend beyond database interactions. For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md). 
diff --git a/docs/en/concepts/telemetry/index.md b/docs/en/concepts/telemetry/index.md index 49b7c9edca..9bd1598052 100644 --- a/docs/en/concepts/telemetry/index.md +++ b/docs/en/concepts/telemetry/index.md @@ -64,7 +64,7 @@ The structured logging outputs log as JSON: "timestamp":"2024-11-04T16:45:11.987299-08:00", "severity":"ERROR", "logging.googleapis.com/sourceLocation":{...}, - "message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid kind of data source" + "message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid type of data source" } ``` diff --git a/docs/en/getting-started/colab_quickstart.ipynb b/docs/en/getting-started/colab_quickstart.ipynb index 4260b60bb3..a12429180d 100644 --- a/docs/en/getting-started/colab_quickstart.ipynb +++ b/docs/en/getting-started/colab_quickstart.ipynb @@ -234,7 +234,7 @@ }, "outputs": [], "source": [ - "version = \"0.26.0\" # x-release-please-version\n", + "version = \"0.27.0\" # x-release-please-version\n", "! 
curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n", "\n", "# Make the binary executable\n", @@ -300,78 +300,89 @@ "# You can also upload a tools file and use that to run toolbox.\n", "tools_file_name = \"tools.yml\"\n", "file_content = f\"\"\"\n", - "sources:\n", - " my-pg-source:\n", - " kind: postgres\n", - " host: 127.0.0.1\n", - " port: 5432\n", - " database: toolbox_db\n", - " user: toolbox_user\n", - " password: my-password\n", + "kind: sources\n", + "name: my-pg-source\n", + "type: postgres\n", + "host: 127.0.0.1\n", + "port: 5432\n", + "database: toolbox_db\n", + "user: toolbox_user\n", + "password: my-password\n", + "---\n", + "kind: tools\n", + "name: search-hotels-by-name\n", + "type: postgres-sql\n", + "source: my-pg-source\n", + "description: Search for hotels based on name.\n", + "parameters:\n", + " - name: name\n", + " type: string\n", + " description: The name of the hotel.\n", + "statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n", + "---\n", + "kind: tools\n", + "name: search-hotels-by-location\n", + "type: postgres-sql\n", + "source: my-pg-source\n", + "description: Search for hotels based on location.\n", + "parameters:\n", + " - name: location\n", + " type: string\n", + " description: The location of the hotel.\n", + "statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n", + "---\n", + "kind: tools\n", + "name: book-hotel\n", + "type: postgres-sql\n", + "source: my-pg-source\n", + "description: >-\n", + " Book a hotel by its ID. 
If the hotel is successfully booked, returns a NULL, raises an error if not.\n", + "parameters:\n", + " - name: hotel_id\n", + " type: string\n", + " description: The ID of the hotel to book.\n", + "statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n", + "---\n", + "kind: tools\n", + "name: update-hotel\n", + "type: postgres-sql\n", + "source: my-pg-source\n", + "description: >-\n", + " Update a hotel's check-in and check-out dates by its ID. Returns a message\n", + " indicating whether the hotel was successfully updated or not.\n", + "parameters:\n", + " - name: hotel_id\n", + " type: string\n", + " description: The ID of the hotel to update.\n", + " - name: checkin_date\n", + " type: string\n", + " description: The new check-in date of the hotel.\n", + " - name: checkout_date\n", + " type: string\n", + " description: The new check-out date of the hotel.\n", + "statement: >-\n", + " UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n", + " as date) WHERE id = \\$1;\n", + "---\n", + "kind: tools\n", + "name: cancel-hotel\n", + "type: postgres-sql\n", + "source: my-pg-source\n", + "description: Cancel a hotel by its ID.\n", + "parameters:\n", + " - name: hotel_id\n", + " type: string\n", + " description: The ID of the hotel to cancel.\n", + "statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n", + "---\n", + "kind: toolsets\n", + "name: my-toolset\n", "tools:\n", - " search-hotels-by-name:\n", - " kind: postgres-sql\n", - " source: my-pg-source\n", - " description: Search for hotels based on name.\n", - " parameters:\n", - " - name: name\n", - " type: string\n", - " description: The name of the hotel.\n", - " statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n", - " search-hotels-by-location:\n", - " kind: postgres-sql\n", - " source: my-pg-source\n", - " description: Search for hotels based on location.\n", - " parameters:\n", - " - name: location\n", - " type: string\n", - " description: The 
location of the hotel.\n", - " statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n", - " book-hotel:\n", - " kind: postgres-sql\n", - " source: my-pg-source\n", - " description: >-\n", - " Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n", - " parameters:\n", - " - name: hotel_id\n", - " type: string\n", - " description: The ID of the hotel to book.\n", - " statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n", - " update-hotel:\n", - " kind: postgres-sql\n", - " source: my-pg-source\n", - " description: >-\n", - " Update a hotel's check-in and check-out dates by its ID. Returns a message\n", - " indicating whether the hotel was successfully updated or not.\n", - " parameters:\n", - " - name: hotel_id\n", - " type: string\n", - " description: The ID of the hotel to update.\n", - " - name: checkin_date\n", - " type: string\n", - " description: The new check-in date of the hotel.\n", - " - name: checkout_date\n", - " type: string\n", - " description: The new check-out date of the hotel.\n", - " statement: >-\n", - " UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n", - " as date) WHERE id = \\$1;\n", - " cancel-hotel:\n", - " kind: postgres-sql\n", - " source: my-pg-source\n", - " description: Cancel a hotel by its ID.\n", - " parameters:\n", - " - name: hotel_id\n", - " type: string\n", - " description: The ID of the hotel to cancel.\n", - " statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n", - "toolsets:\n", - " my-toolset:\n", - " - search-hotels-by-name\n", - " - search-hotels-by-location\n", - " - book-hotel\n", - " - update-hotel\n", - " - cancel-hotel\n", + " - search-hotels-by-name\n", + " - search-hotels-by-location\n", + " - book-hotel\n", + " - update-hotel\n", + " - cancel-hotel\n", "\"\"\"" ] }, @@ -509,8 +520,7 @@ }, "outputs": [], "source": [ - "! pip install toolbox-core --quiet\n", - "! pip install google-adk --quiet" + "! 
pip install google-adk[toolbox] --quiet" ] }, { @@ -525,14 +535,18 @@ "from google.adk.runners import Runner\n", "from google.adk.sessions import InMemorySessionService\n", "from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n", + "from google.adk.tools.toolbox_toolset import ToolboxToolset\n", "from google.genai import types\n", - "from toolbox_core import ToolboxSyncClient\n", "\n", "import os\n", "# TODO(developer): replace this with your Google API key\n", "os.environ['GOOGLE_API_KEY'] = \"\"\n", "\n", - "toolbox_client = ToolboxSyncClient(\"http://127.0.0.1:5000\")\n", + "# Configure toolset\n", + "toolset = ToolboxToolset(\n", + " server_url=\"http://127.0.0.1:5000\",\n", + " toolset_name=\"my-toolset\"\n", + ")\n", "\n", "prompt = \"\"\"\n", " You're a helpful hotel assistant. You handle hotel searching, booking and\n", @@ -549,7 +563,7 @@ " name='hotel_agent',\n", " description='A helpful AI assistant.',\n", " instruction=prompt,\n", - " tools=toolbox_client.load_toolset(\"my-toolset\"),\n", + " tools=[toolset],\n", ")\n", "\n", "session_service = InMemorySessionService()\n", diff --git a/docs/en/getting-started/configure.md b/docs/en/getting-started/configure.md index 6155eb5608..e578c64dfc 100644 --- a/docs/en/getting-started/configure.md +++ b/docs/en/getting-started/configure.md @@ -36,14 +36,14 @@ Toolbox should have access to. Most tools will have at least one source to execute against. 
```yaml -sources: - my-pg-source: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: toolbox_db - user: ${USER_NAME} - password: ${PASSWORD} +kind: sources +name: my-pg-source +type: postgres +host: 127.0.0.1 +port: 5432 +database: toolbox_db +user: ${USER_NAME} +password: ${PASSWORD} ``` For more details on configuring different types of sources, see the @@ -52,20 +52,20 @@ For more details on configuring different types of sources, see the ### Tools The `tools` section of your `tools.yaml` defines the actions your agent can -take: what kind of tool it is, which source(s) it affects, what parameters it +take: what type of tool it is, which source(s) it affects, what parameters it uses, etc. ```yaml -tools: - search-hotels-by-name: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. - statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; +kind: tools +name: search-hotels-by-name +type: postgres-sql +source: my-pg-source +description: Search for hotels based on name. +parameters: + - name: name + type: string + description: The name of the hotel. +statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; ``` For more details on configuring different types of tools, see the @@ -78,13 +78,17 @@ that you want to be able to load together. This can be useful for defining different sets for different agents or different applications. 
```yaml -toolsets: - my_first_toolset: - - my_first_tool - - my_second_tool - my_second_toolset: - - my_second_tool - - my_third_tool +kind: toolsets +name: my_first_toolset +tools: + - my_first_tool + - my_second_tool +--- +kind: toolsets +name: my_second_toolset +tools: + - my_second_tool + - my_third_tool ``` You can load toolsets by name: @@ -103,14 +107,14 @@ The `prompts` section of your `tools.yaml` defines the templates containing structured messages and instructions for interacting with language models. ```yaml -prompts: - code_review: - description: "Asks the LLM to analyze code quality and suggest improvements." - messages: - - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" - arguments: - - name: "code" - description: "The code to review" +kind: prompts +name: code_review +description: "Asks the LLM to analyze code quality and suggest improvements." +messages: + - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" +arguments: + - name: "code" + description: "The code to review" ``` For more details on configuring different types of prompts, see the diff --git a/docs/en/getting-started/introduction/_index.md b/docs/en/getting-started/introduction/_index.md index 5fb0b757c8..65453b7030 100644 --- a/docs/en/getting-started/introduction/_index.md +++ b/docs/en/getting-started/introduction/_index.md @@ -16,6 +16,12 @@ Databases” as its initial development predated MCP, but was renamed to align with recently added MCP compatibility. {{< /notice >}} +{{< notice note >}} +This document has been updated to support the configuration file v2 format. To +view documentation with configuration file v1 format, please navigate to the +top-right menu and select versions v0.26.0 or older. +{{< /notice >}} + ## Why Toolbox? Toolbox helps you build Gen AI tools that let your agents access data in your @@ -71,7 +77,7 @@ redeploying your application. 
## Getting Started -### (Non-production) Running Toolbox +### Quickstart: Running Toolbox using NPX You can run Toolbox directly with a [configuration file](../configure.md): @@ -103,7 +109,7 @@ To install Toolbox as a binary on Linux (AMD64): ```sh # see releases page for other versions -export VERSION=0.26.0 +export VERSION=0.27.0 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox chmod +x toolbox ``` @@ -114,7 +120,7 @@ To install Toolbox as a binary on macOS (Apple Silicon): ```sh # see releases page for other versions -export VERSION=0.26.0 +export VERSION=0.27.0 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox chmod +x toolbox ``` @@ -125,7 +131,7 @@ To install Toolbox as a binary on macOS (Intel): ```sh # see releases page for other versions -export VERSION=0.26.0 +export VERSION=0.27.0 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox chmod +x toolbox ``` @@ -136,7 +142,7 @@ To install Toolbox as a binary on Windows (Command Prompt): ```cmd :: see releases page for other versions -set VERSION=0.26.0 +set VERSION=0.27.0 curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe" ``` @@ -146,7 +152,7 @@ To install Toolbox as a binary on Windows (PowerShell): ```powershell # see releases page for other versions -$VERSION = "0.26.0" +$VERSION = "0.27.0" curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" ``` @@ -158,7 +164,7 @@ You can also install Toolbox as a container: ```sh # see releases page for other versions -export VERSION=0.26.0 +export VERSION=0.27.0 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION ``` @@ -177,7 +183,7 @@ To install from source, ensure you have the latest version of [Go installed](https://go.dev/doc/install), and then run the following command: ```sh -go install 
github.com/googleapis/genai-toolbox@v0.26.0 +go install github.com/googleapis/genai-toolbox@v0.27.0 ``` {{% /tab %}} diff --git a/docs/en/getting-started/local_quickstart.md b/docs/en/getting-started/local_quickstart.md index 684b4b03e9..414156f672 100644 --- a/docs/en/getting-started/local_quickstart.md +++ b/docs/en/getting-started/local_quickstart.md @@ -52,7 +52,7 @@ runtime](https://research.google.com/colaboratory/local-runtimes.html). {{< tabpane persist=header >}} {{< tab header="ADK" lang="bash" >}} -pip install toolbox-core +pip install google-adk[toolbox] {{< /tab >}} {{< tab header="Langchain" lang="bash" >}} @@ -73,7 +73,7 @@ pip install toolbox-core {{< tabpane persist=header >}} {{< tab header="ADK" lang="bash" >}} -pip install google-adk +# No other dependencies required for ADK {{< /tab >}} {{< tab header="Langchain" lang="bash" >}} @@ -115,7 +115,7 @@ pip install google-genai 1. Update `my_agent/agent.py` with the following content to connect to Toolbox: ```py - {{< include "quickstart/python/adk/quickstart.py" >}} + {{< regionInclude "quickstart/python/adk/quickstart.py" "quickstart" >}} ```
diff --git a/docs/en/getting-started/mcp_quickstart/_index.md b/docs/en/getting-started/mcp_quickstart/_index.md index 7967d574a5..b005643a10 100644 --- a/docs/en/getting-started/mcp_quickstart/_index.md +++ b/docs/en/getting-started/mcp_quickstart/_index.md @@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` @@ -125,78 +125,89 @@ In this section, we will download Toolbox, configure our tools in a {{< /notice >}} ```yaml - sources: - my-pg-source: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: toolbox_db - user: toolbox_user - password: my-password + kind: sources + name: my-pg-source + type: postgres + host: 127.0.0.1 + port: 5432 + database: toolbox_db + user: toolbox_user + password: my-password + --- + kind: tools + name: search-hotels-by-name + type: postgres-sql + source: my-pg-source + description: Search for hotels based on name. + parameters: + - name: name + type: string + description: The name of the hotel. + statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; + --- + kind: tools + name: search-hotels-by-location + type: postgres-sql + source: my-pg-source + description: Search for hotels based on location. + parameters: + - name: location + type: string + description: The location of the hotel. + statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%'; + --- + kind: tools + name: book-hotel + type: postgres-sql + source: my-pg-source + description: >- + Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to book. 
+ statement: UPDATE hotels SET booked = B'1' WHERE id = $1; + --- + kind: tools + name: update-hotel + type: postgres-sql + source: my-pg-source + description: >- + Update a hotel's check-in and check-out dates by its ID. Returns a message + indicating whether the hotel was successfully updated or not. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to update. + - name: checkin_date + type: string + description: The new check-in date of the hotel. + - name: checkout_date + type: string + description: The new check-out date of the hotel. + statement: >- + UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3 + as date) WHERE id = $1; + --- + kind: tools + name: cancel-hotel + type: postgres-sql + source: my-pg-source + description: Cancel a hotel by its ID. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to cancel. + statement: UPDATE hotels SET booked = B'0' WHERE id = $1; + --- + kind: toolsets + name: my-toolset tools: - search-hotels-by-name: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. - statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; - search-hotels-by-location: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on location. - parameters: - - name: location - type: string - description: The location of the hotel. - statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%'; - book-hotel: - kind: postgres-sql - source: my-pg-source - description: >- - Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to book. 
- statement: UPDATE hotels SET booked = B'1' WHERE id = $1; - update-hotel: - kind: postgres-sql - source: my-pg-source - description: >- - Update a hotel's check-in and check-out dates by its ID. Returns a message - indicating whether the hotel was successfully updated or not. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to update. - - name: checkin_date - type: string - description: The new check-in date of the hotel. - - name: checkout_date - type: string - description: The new check-out date of the hotel. - statement: >- - UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3 - as date) WHERE id = $1; - cancel-hotel: - kind: postgres-sql - source: my-pg-source - description: Cancel a hotel by its ID. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to cancel. - statement: UPDATE hotels SET booked = B'0' WHERE id = $1; - toolsets: - my-toolset: - - search-hotels-by-name - - search-hotels-by-location - - book-hotel - - update-hotel - - cancel-hotel + - search-hotels-by-name + - search-hotels-by-location + - book-hotel + - update-hotel + - cancel-hotel ``` For more info on tools, check out the diff --git a/docs/en/getting-started/prompts_quickstart_gemini_cli.md b/docs/en/getting-started/prompts_quickstart_gemini_cli.md index 2061acd7fa..47140ed2e7 100644 --- a/docs/en/getting-started/prompts_quickstart_gemini_cli.md +++ b/docs/en/getting-started/prompts_quickstart_gemini_cli.md @@ -157,61 +157,67 @@ Create a file named `tools.yaml`. This file defines the database connection, the SQL tools available, and the prompts the agents will use. ```yaml -sources: - my-foodiefind-db: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: toolbox_db - user: toolbox_user - password: my-password -tools: - find_user_by_email: - kind: postgres-sql - source: my-foodiefind-db - description: Find a user's ID by their email address. 
- parameters: - - name: email - type: string - description: The email address of the user to find. - statement: SELECT id FROM users WHERE email = $1; - find_restaurant_by_name: - kind: postgres-sql - source: my-foodiefind-db - description: Find a restaurant's ID by its exact name. - parameters: - - name: name - type: string - description: The name of the restaurant to find. - statement: SELECT id FROM restaurants WHERE name = $1; - find_review_by_user_and_restaurant: - kind: postgres-sql - source: my-foodiefind-db - description: Find the full record for a specific review using the user's ID and the restaurant's ID. - parameters: - - name: user_id - type: integer - description: The numerical ID of the user. - - name: restaurant_id - type: integer - description: The numerical ID of the restaurant. - statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2; -prompts: - investigate_missing_review: - description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status." - arguments: - - name: "user_email" - description: "The email of the user who wrote the review." - - name: "restaurant_name" - description: "The name of the restaurant being reviewed." - messages: - - content: >- - **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status. - **Workflow:** - 1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`. - 2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`. - 3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found. - 4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence. 
+kind: sources +name: my-foodiefind-db +type: postgres +host: 127.0.0.1 +port: 5432 +database: toolbox_db +user: toolbox_user +password: my-password +--- +kind: tools +name: find_user_by_email +type: postgres-sql +source: my-foodiefind-db +description: Find a user's ID by their email address. +parameters: + - name: email + type: string + description: The email address of the user to find. +statement: SELECT id FROM users WHERE email = $1; +--- +kind: tools +name: find_restaurant_by_name +type: postgres-sql +source: my-foodiefind-db +description: Find a restaurant's ID by its exact name. +parameters: + - name: name + type: string + description: The name of the restaurant to find. +statement: SELECT id FROM restaurants WHERE name = $1; +--- +kind: tools +name: find_review_by_user_and_restaurant +type: postgres-sql +source: my-foodiefind-db +description: Find the full record for a specific review using the user's ID and the restaurant's ID. +parameters: + - name: user_id + type: integer + description: The numerical ID of the user. + - name: restaurant_id + type: integer + description: The numerical ID of the restaurant. +statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2; +--- +kind: prompts +name: investigate_missing_review +description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status." +arguments: + - name: "user_email" + description: "The email of the user who wrote the review." + - name: "restaurant_name" + description: "The name of the restaurant being reviewed." +messages: + - content: >- + **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status. + **Workflow:** + 1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`. + 2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`. + 3. 
Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found. + 4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence. ``` ## Step 3: Connect to Gemini CLI diff --git a/docs/en/getting-started/quickstart/js/adk/package-lock.json b/docs/en/getting-started/quickstart/js/adk/package-lock.json index 84bc88e40a..9bcbc4080d 100644 --- a/docs/en/getting-started/quickstart/js/adk/package-lock.json +++ b/docs/en/getting-started/quickstart/js/adk/package-lock.json @@ -18,6 +18,7 @@ "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", "license": "Apache-2.0", + "peer": true, "dependencies": { "arrify": "^2.0.0", "extend": "^3.0.2" @@ -31,6 +32,7 @@ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=14.0.0" } @@ -40,15 +42,17 @@ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=14" } }, "node_modules/@google-cloud/storage": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz", - "integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.19.0.tgz", + "integrity": 
"sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ==", "license": "Apache-2.0", + "peer": true, "dependencies": { "@google-cloud/paginator": "^5.0.0", "@google-cloud/projectify": "^4.0.0", @@ -56,7 +60,7 @@ "abort-controller": "^3.0.0", "async-retry": "^1.3.3", "duplexify": "^4.1.3", - "fast-xml-parser": "^4.4.1", + "fast-xml-parser": "^5.3.4", "gaxios": "^6.0.2", "google-auth-library": "^9.6.3", "html-entities": "^2.5.2", @@ -75,6 +79,7 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "license": "MIT", + "peer": true, "bin": { "uuid": "dist/bin/uuid" } @@ -97,7 +102,6 @@ "resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz", "integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==", "license": "Apache-2.0", - "peer": true, "dependencies": { "google-auth-library": "^9.14.2", "ws": "^8.18.0" @@ -136,7 +140,6 @@ "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz", "integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==", "license": "MIT", - "peer": true, "dependencies": { "ajv": "^6.12.6", "content-type": "^1.0.5", @@ -299,6 +302,7 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", "license": "MIT", + "peer": true, "engines": { "node": ">= 10" } @@ -307,13 +311,15 @@ "version": "0.12.5", "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/@types/node": { "version": "24.10.1", "resolved": 
"https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", "license": "MIT", + "peer": true, "dependencies": { "undici-types": "~7.16.0" } @@ -323,6 +329,7 @@ "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz", "integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==", "license": "MIT", + "peer": true, "dependencies": { "@types/caseless": "*", "@types/node": "*", @@ -335,6 +342,7 @@ "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz", "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==", "license": "MIT", + "peer": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -352,6 +360,7 @@ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "license": "MIT", + "peer": true, "engines": { "node": ">= 0.6" } @@ -361,6 +370,7 @@ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "license": "MIT", + "peer": true, "dependencies": { "mime-db": "1.52.0" }, @@ -372,13 +382,15 @@ "version": "4.0.5", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", "license": "MIT", + 
"peer": true, "dependencies": { "event-target-shim": "^5.0.0" }, @@ -453,6 +465,7 @@ "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", "license": "MIT", + "peer": true, "engines": { "node": ">=8" } @@ -462,6 +475,7 @@ "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", "license": "MIT", + "peer": true, "dependencies": { "retry": "0.13.1" } @@ -754,6 +768,7 @@ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", "license": "MIT", + "peer": true, "dependencies": { "end-of-stream": "^1.4.1", "inherits": "^2.0.3", @@ -802,6 +817,7 @@ "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", "license": "MIT", + "peer": true, "dependencies": { "once": "^1.4.0" } @@ -871,6 +887,7 @@ "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=6" } @@ -901,7 +918,6 @@ "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", "license": "MIT", - "peer": true, "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", @@ -973,9 +989,9 @@ "license": "MIT" }, "node_modules/fast-xml-parser": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz", - "integrity": 
"sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==", + "version": "5.3.5", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.5.tgz", + "integrity": "sha512-JeaA2Vm9ffQKp9VjvfzObuMCjUYAp5WDYhRYL5LrBPY/jUDlUtOvDfot0vKSkB9tuX885BDHjtw4fZadD95wnA==", "funding": [ { "type": "github", @@ -983,8 +999,9 @@ } ], "license": "MIT", + "peer": true, "dependencies": { - "strnum": "^1.1.1" + "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" @@ -1333,7 +1350,8 @@ "url": "https://patreon.com/mdevils" } ], - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/http-errors": { "version": "2.0.0", @@ -1365,6 +1383,7 @@ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", "license": "MIT", + "peer": true, "dependencies": { "@tootallnate/once": "2", "agent-base": "6", @@ -1379,6 +1398,7 @@ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "license": "MIT", + "peer": true, "dependencies": { "debug": "4" }, @@ -1555,6 +1575,7 @@ "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", "license": "MIT", + "peer": true, "bin": { "mime": "cli.js" }, @@ -1715,6 +1736,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "license": "MIT", + "peer": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -1856,6 +1878,7 @@ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", "integrity": 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "license": "MIT", + "peer": true, "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -1870,6 +1893,7 @@ "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", "license": "MIT", + "peer": true, "engines": { "node": ">= 4" } @@ -1879,6 +1903,7 @@ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", "license": "MIT", + "peer": true, "dependencies": { "@types/request": "^2.48.8", "extend": "^3.0.2", @@ -2107,6 +2132,7 @@ "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", "license": "MIT", + "peer": true, "dependencies": { "stubs": "^3.0.0" } @@ -2115,13 +2141,15 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "license": "MIT", + "peer": true, "dependencies": { "safe-buffer": "~5.2.0" } @@ -2223,28 +2251,31 @@ } }, "node_modules/strnum": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", - "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz", + "integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/NaturalIntelligence" } ], - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/teeny-request": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", "license": "Apache-2.0", + "peer": true, "dependencies": { "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", @@ -2261,6 +2292,7 @@ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "license": "MIT", + "peer": true, "dependencies": { "debug": "4" }, @@ -2273,6 +2305,7 @@ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "license": "MIT", + "peer": true, "dependencies": { "agent-base": "6", "debug": "4" @@ -2314,7 +2347,8 @@ "version": "7.16.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/unpipe": { "version": "1.0.0", @@ -2338,7 +2372,8 @@ "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/uuid": { "version": "9.0.1", @@ -2525,6 +2560,7 @@ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=10" }, @@ -2537,7 +2573,6 @@ "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", "license": "MIT", - "peer": true, "funding": { "url": "https://github.com/sponsors/colinhacks" } diff --git a/docs/en/getting-started/quickstart/js/genkit/package-lock.json b/docs/en/getting-started/quickstart/js/genkit/package-lock.json index 22324c05f3..cdb5744245 100644 --- a/docs/en/getting-started/quickstart/js/genkit/package-lock.json +++ b/docs/en/getting-started/quickstart/js/genkit/package-lock.json @@ -24,12 +24,13 @@ } }, "node_modules/@dabh/diagnostics": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", - "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz", + "integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==", + "license": "MIT", "optional": true, "dependencies": { - "colorspace": "1.1.x", + "@so-ric/colorspace": "^1.1.6", "enabled": "2.0.x", "kuler": "^2.0.0" } @@ -578,9 +579,10 @@ } }, "node_modules/@google-cloud/firestore": { - "version": "7.11.3", - "resolved": 
"https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.3.tgz", - "integrity": "sha512-qsM3/WHpawF07SRVvEJJVRwhYzM7o9qtuksyuqnrMig6fxIrwWnsezECWsG/D5TyYru51Fv5c/RTqNDQ2yU+4w==", + "version": "7.11.6", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz", + "integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==", + "license": "Apache-2.0", "optional": true, "peer": true, "dependencies": { @@ -2887,6 +2889,17 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, + "node_modules/@so-ric/colorspace": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz", + "integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==", + "license": "MIT", + "optional": true, + "dependencies": { + "color": "^5.0.2", + "text-hex": "1.0.x" + } + }, "node_modules/@toolbox-sdk/core": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz", @@ -3338,13 +3351,13 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/axios": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", - "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", + "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.4", + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, @@ -3515,38 +3528,53 @@ } 
}, "node_modules/color": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", - "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz", + "integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==", + "license": "MIT", "optional": true, "dependencies": { - "color-convert": "^1.9.3", - "color-string": "^1.6.0" + "color-convert": "^3.1.3", + "color-string": "^2.1.3" + }, + "engines": { + "node": ">=18" } }, "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz", + "integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==", + "license": "MIT", "optional": true, "dependencies": { - "color-name": "1.1.3" + "color-name": "^2.0.0" + }, + "engines": { + "node": ">=14.6" } }, "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "optional": true + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz", + "integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=12.20" + } }, "node_modules/color-string": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", - "integrity": 
"sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz", + "integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==", + "license": "MIT", "optional": true, "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" + "color-name": "^2.0.0" + }, + "engines": { + "node": ">=18" } }, "node_modules/colorette": { @@ -3554,16 +3582,6 @@ "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, - "node_modules/colorspace": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", - "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", - "optional": true, - "dependencies": { - "color": "^3.1.3", - "text-hex": "1.0.x" - } - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -4230,9 +4248,10 @@ } }, "node_modules/form-data": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", - "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -4968,12 +4987,6 @@ "node": ">= 0.10" } }, - "node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": 
"sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "optional": true - }, "node_modules/is-core-module": { "version": "2.16.1", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", @@ -5114,13 +5127,14 @@ } }, "node_modules/jsonwebtoken/node_modules/jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz", + "integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==", + "license": "MIT", "optional": true, "peer": true, "dependencies": { - "jwa": "^1.4.1", + "jwa": "^1.4.2", "safe-buffer": "^5.0.1" } }, @@ -5153,11 +5167,12 @@ } }, "node_modules/jws": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", - "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz", + "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==", + "license": "MIT", "dependencies": { - "jwa": "^2.0.0", + "jwa": "^2.0.1", "safe-buffer": "^5.0.1" } }, @@ -5424,9 +5439,10 @@ } }, "node_modules/node-forge": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", - "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz", + "integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==", + "license": "(BSD-3-Clause OR GPL-2.0)", "optional": true, "peer": true, 
"engines": { @@ -6038,15 +6054,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", - "optional": true, - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -6233,6 +6240,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==", + "license": "MIT", "optional": true }, "node_modules/thriftrw": { @@ -6416,13 +6424,14 @@ } }, "node_modules/winston": { - "version": "3.17.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz", - "integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==", + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz", + "integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==", + "license": "MIT", "optional": true, "dependencies": { "@colors/colors": "^1.6.0", - "@dabh/diagnostics": "^2.0.2", + "@dabh/diagnostics": "^2.0.8", "async": "^3.2.3", "is-stream": "^2.0.0", "logform": "^2.7.0", diff --git a/docs/en/getting-started/quickstart/js/genkit/quickstart.js b/docs/en/getting-started/quickstart/js/genkit/quickstart.js index 2ec16cb843..1c1f203539 100644 --- a/docs/en/getting-started/quickstart/js/genkit/quickstart.js +++ b/docs/en/getting-started/quickstart/js/genkit/quickstart.js @@ -53,7 +53,7 @@ export async function main() { for (const query of queries) { conversationHistory.push({ role: "user", content: [{ text: query }] }); - const response = await 
ai.generate({ + let response = await ai.generate({ messages: conversationHistory, tools: tools, }); diff --git a/docs/en/getting-started/quickstart/js/langchain/package-lock.json b/docs/en/getting-started/quickstart/js/langchain/package-lock.json index a52001ef13..c71fb84620 100644 --- a/docs/en/getting-started/quickstart/js/langchain/package-lock.json +++ b/docs/en/getting-started/quickstart/js/langchain/package-lock.json @@ -18,7 +18,8 @@ "node_modules/@cfworker/json-schema": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz", - "integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==" + "integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==", + "peer": true }, "node_modules/@google/generative-ai": { "version": "0.24.1", @@ -225,6 +226,7 @@ "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "peer": true, "engines": { "node": ">=10" }, @@ -308,6 +310,7 @@ "version": "6.3.0", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "peer": true, "engines": { "node": ">=10" }, @@ -420,6 +423,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -821,6 +825,7 @@ "version": "1.0.21", "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz", "integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==", + "peer": true, "dependencies": { 
"base64-js": "^1.5.1" } @@ -873,9 +878,9 @@ } }, "node_modules/langsmith": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz", - "integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.5.2.tgz", + "integrity": "sha512-CfkcQsiajtTWknAcyItvJsKEQdY2VgDpm6U8pRI9wnM07mevnOv5EF+RcqWGwx37SEUxtyi2RXMwnKW8b06JtA==", "license": "MIT", "dependencies": { "@types/uuid": "^10.0.0", @@ -969,6 +974,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "peer": true, "bin": { "mustache": "bin/mustache" } @@ -1407,7 +1413,6 @@ "version": "3.25.76", "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", - "peer": true, "funding": { "url": "https://github.com/sponsors/colinhacks" } diff --git a/docs/en/getting-started/quickstart/python/adk/quickstart.py b/docs/en/getting-started/quickstart/python/adk/quickstart.py index d341e89842..42a72fa38e 100644 --- a/docs/en/getting-started/quickstart/python/adk/quickstart.py +++ b/docs/en/getting-started/quickstart/python/adk/quickstart.py @@ -1,15 +1,57 @@ +# [START quickstart] +import asyncio + from google.adk import Agent from google.adk.apps import App -from toolbox_core import ToolboxSyncClient +from google.adk.runners import InMemoryRunner +from google.adk.tools.toolbox_toolset import ToolboxToolset +from google.genai.types import Content, Part + +prompt = """ +You're a helpful hotel assistant. You handle hotel searching, booking and +cancellations. When the user searches for a hotel, mention it's name, id, +location and price tier. 
Always mention hotel ids while performing any +searches. This is very important for any operations. For any bookings or +cancellations, please provide the appropriate confirmation. Be sure to +update checkin or checkout dates if mentioned by the user. +Don't ask for confirmations from the user. +""" # TODO(developer): update the TOOLBOX_URL to your toolbox endpoint -client = ToolboxSyncClient("http://127.0.0.1:5000") +toolset = ToolboxToolset( + server_url="http://127.0.0.1:5000", +) root_agent = Agent( - name='root_agent', + name='hotel_assistant', model='gemini-2.5-flash', - instruction="You are a helpful AI assistant designed to provide accurate and useful information.", - tools=client.load_toolset(), + instruction=prompt, + tools=[toolset], ) app = App(root_agent=root_agent, name="my_agent") +# [END quickstart] + +queries = [ + "Find hotels in Basel with Basel in its name.", + "Can you book the Hilton Basel for me?", + "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.", + "My check in dates would be from April 10, 2024 to April 19, 2024.", +] + +async def main(): + runner = InMemoryRunner(app=app) + session = await runner.session_service.create_session( + app_name=app.name, user_id="test_user" + ) + + for query in queries: + print(f"\nUser: {query}") + user_message = Content(parts=[Part.from_text(text=query)]) + + async for event in runner.run_async(user_id="test_user", session_id=session.id, new_message=user_message): + if event.is_final_response() and event.content and event.content.parts: + print(f"Agent: {event.content.parts[0].text}") + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/docs/en/getting-started/quickstart/python/adk/requirements.txt b/docs/en/getting-started/quickstart/python/adk/requirements.txt index 7fb84e8d67..b67361ecfd 100644 --- a/docs/en/getting-started/quickstart/python/adk/requirements.txt +++ b/docs/en/getting-started/quickstart/python/adk/requirements.txt @@ -1,3 +1,2 @@ 
-google-adk==1.21.0 -toolbox-core==0.5.4 +google-adk[toolbox]==1.23.0 pytest==9.0.2 \ No newline at end of file diff --git a/docs/en/getting-started/quickstart/python/quickstart_test.py b/docs/en/getting-started/quickstart/python/quickstart_test.py index eb46bee1f8..b6c6e3a8a8 100755 --- a/docs/en/getting-started/quickstart/python/quickstart_test.py +++ b/docs/en/getting-started/quickstart/python/quickstart_test.py @@ -41,31 +41,29 @@ def golden_keywords(): class TestExecution: """Test framework execution and output validation.""" + _cached_output = None + @pytest.fixture(scope="function") def script_output(self, capsys): """Run the quickstart function and return its output.""" - - # TODO: Add better validation for ADK once we have a way to capture its - # output. - if ORCH_NAME == "adk": - return quickstart.app.root_agent.name - else: + if TestExecution._cached_output is None: asyncio.run(quickstart.main()) - - return capsys.readouterr() + out, err = capsys.readouterr() + TestExecution._cached_output = (out, err) + + class Output: + def __init__(self, out, err): + self.out = out + self.err = err + + return Output(*TestExecution._cached_output) def test_script_runs_without_errors(self, script_output): """Test that the script runs and produces no stderr.""" - if ORCH_NAME == "adk": - return assert script_output.err == "", f"Script produced stderr: {script_output.err}" def test_keywords_in_output(self, script_output, golden_keywords): """Test that expected keywords are present in the script's output.""" - - if ORCH_NAME == "adk": - assert script_output == "root_agent" - return output = script_output.out missing_keywords = [kw for kw in golden_keywords if kw not in output] assert not missing_keywords, f"Missing keywords in output: {missing_keywords}" diff --git a/docs/en/getting-started/quickstart/shared/configure_toolbox.md b/docs/en/getting-started/quickstart/shared/configure_toolbox.md index 12436ee7d6..3a20508982 100644 --- 
a/docs/en/getting-started/quickstart/shared/configure_toolbox.md +++ b/docs/en/getting-started/quickstart/shared/configure_toolbox.md @@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` @@ -33,78 +33,89 @@ In this section, we will download Toolbox, configure our tools in a {{< /notice >}} ```yaml - sources: - my-pg-source: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: toolbox_db - user: ${USER_NAME} - password: ${PASSWORD} + kind: sources + name: my-pg-source + type: postgres + host: 127.0.0.1 + port: 5432 + database: toolbox_db + user: toolbox_user + password: my-password + --- + kind: tools + name: search-hotels-by-name + type: postgres-sql + source: my-pg-source + description: Search for hotels based on name. + parameters: + - name: name + type: string + description: The name of the hotel. + statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; + --- + kind: tools + name: search-hotels-by-location + type: postgres-sql + source: my-pg-source + description: Search for hotels based on location. + parameters: + - name: location + type: string + description: The location of the hotel. + statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%'; + --- + kind: tools + name: book-hotel + type: postgres-sql + source: my-pg-source + description: >- + Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to book. 
+ statement: UPDATE hotels SET booked = B'1' WHERE id = $1; + --- + kind: tools + name: update-hotel + type: postgres-sql + source: my-pg-source + description: >- + Update a hotel's check-in and check-out dates by its ID. Returns a message + indicating whether the hotel was successfully updated or not. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to update. + - name: checkin_date + type: string + description: The new check-in date of the hotel. + - name: checkout_date + type: string + description: The new check-out date of the hotel. + statement: >- + UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3 + as date) WHERE id = $1; + --- + kind: tools + name: cancel-hotel + type: postgres-sql + source: my-pg-source + description: Cancel a hotel by its ID. + parameters: + - name: hotel_id + type: string + description: The ID of the hotel to cancel. + statement: UPDATE hotels SET booked = B'0' WHERE id = $1; + --- + kind: toolsets + name: my-toolset tools: - search-hotels-by-name: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. - statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%'; - search-hotels-by-location: - kind: postgres-sql - source: my-pg-source - description: Search for hotels based on location. - parameters: - - name: location - type: string - description: The location of the hotel. - statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%'; - book-hotel: - kind: postgres-sql - source: my-pg-source - description: >- - Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to book. 
- statement: UPDATE hotels SET booked = B'1' WHERE id = $1; - update-hotel: - kind: postgres-sql - source: my-pg-source - description: >- - Update a hotel's check-in and check-out dates by its ID. Returns a message - indicating whether the hotel was successfully updated or not. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to update. - - name: checkin_date - type: string - description: The new check-in date of the hotel. - - name: checkout_date - type: string - description: The new check-out date of the hotel. - statement: >- - UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3 - as date) WHERE id = $1; - cancel-hotel: - kind: postgres-sql - source: my-pg-source - description: Cancel a hotel by its ID. - parameters: - - name: hotel_id - type: string - description: The ID of the hotel to cancel. - statement: UPDATE hotels SET booked = B'0' WHERE id = $1; - toolsets: - my-toolset: - - search-hotels-by-name - - search-hotels-by-location - - book-hotel - - update-hotel - - cancel-hotel + - search-hotels-by-name + - search-hotels-by-location + - book-hotel + - update-hotel + - cancel-hotel ``` For more info on tools, check out the `Resources` section of the docs. 
diff --git a/docs/en/how-to/connect-ide/looker_mcp.md b/docs/en/how-to/connect-ide/looker_mcp.md index 2972ddb341..ed8ea16a34 100644 --- a/docs/en/how-to/connect-ide/looker_mcp.md +++ b/docs/en/how-to/connect-ide/looker_mcp.md @@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/connect-ide/mssql_mcp.md b/docs/en/how-to/connect-ide/mssql_mcp.md index 722feeaebe..5a11f9c2e4 100644 --- a/docs/en/how-to/connect-ide/mssql_mcp.md +++ b/docs/en/how-to/connect-ide/mssql_mcp.md @@ -45,19 +45,19 @@ instance: {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O 
https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/connect-ide/mysql_mcp.md b/docs/en/how-to/connect-ide/mysql_mcp.md index de1c15d839..c101a597ee 100644 --- a/docs/en/how-to/connect-ide/mysql_mcp.md +++ b/docs/en/how-to/connect-ide/mysql_mcp.md @@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance: {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/connect-ide/neo4j_mcp.md b/docs/en/how-to/connect-ide/neo4j_mcp.md index 624d7540aa..2f68c1d2de 100644 --- a/docs/en/how-to/connect-ide/neo4j_mcp.md 
+++ b/docs/en/how-to/connect-ide/neo4j_mcp.md @@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance: {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/connect-ide/postgres_mcp.md b/docs/en/how-to/connect-ide/postgres_mcp.md index 44e3e09ade..e93e37492c 100644 --- a/docs/en/how-to/connect-ide/postgres_mcp.md +++ b/docs/en/how-to/connect-ide/postgres_mcp.md @@ -32,7 +32,7 @@ to expose your developer assistant tools to a Postgres instance: {{< notice tip >}} This guide can be used with [AlloyDB -Omni](https://cloud.google.com/alloydb/omni/current/docs/overview). +Omni](https://cloud.google.com/alloydb/omni/docs/overview). {{< /notice >}} ## Set up the database @@ -40,10 +40,10 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview). 1. Create or select a PostgreSQL instance. * [Install PostgreSQL locally](https://www.postgresql.org/download/) - * [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart) + * [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart) 1. 
Create or reuse [a database - user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users) + user](https://docs.cloud.google.com/alloydb/omni/containers/current/docs/database-users/manage-users) and have the username and password ready. ## Install MCP Toolbox @@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview). {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/connect-ide/sqlite_mcp.md b/docs/en/how-to/connect-ide/sqlite_mcp.md index 8f0bdf4dac..65d0c39ef0 100644 --- a/docs/en/how-to/connect-ide/sqlite_mcp.md +++ b/docs/en/how-to/connect-ide/sqlite_mcp.md @@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance: {{< tabpane persist=header >}} {{< tab header="linux/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox {{< /tab >}} {{< tab header="darwin/arm64" lang="bash" >}} -curl -O 
https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox {{< /tab >}} {{< tab header="darwin/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox {{< /tab >}} {{< tab header="windows/amd64" lang="bash" >}} -curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe +curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/how-to/deploy_adk_agent.md b/docs/en/how-to/deploy_adk_agent.md index adb390352a..cc247d4831 100644 --- a/docs/en/how-to/deploy_adk_agent.md +++ b/docs/en/how-to/deploy_adk_agent.md @@ -46,10 +46,10 @@ with the necessary configuration for deployment to Vertex AI Agent Engine. process will generate deployment configuration files (like a `Makefile` and `Dockerfile`) in your project directory. -4. Add `toolbox-core` as a dependency to the new project: +4. Add `google-adk[toolbox]` as a dependency to the new project: ```bash - uv add toolbox-core + uv add google-adk[toolbox] ``` ## Step 3: Configure Google Cloud Authentication @@ -83,34 +83,32 @@ Toolbox instead of the local address. 2. Open your agent file (`my_agent/agent.py`). -3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL. +3. Update the `ToolboxToolset` initialization to point to your Cloud Run service URL. Replace the existing initialization code with the following: - {{% alert color="info" %}} -Since Cloud Run services are secured by default, you also need to provide an -authentication token. + {{% alert color="info" title="Note" %}} +Since Cloud Run services are secured by default, you also need to provide a workload identity. 
{{% /alert %}} - Replace your existing client initialization code with the following: - ```python from google.adk import Agent from google.adk.apps import App - from toolbox_core import ToolboxSyncClient, auth_methods + from google.adk.tools.toolbox_toolset import ToolboxToolset + from toolbox_adk import CredentialStrategy # TODO(developer): Replace with your Toolbox Cloud Run Service URL TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app" - # Initialize the client with the Cloud Run URL and Auth headers - client = ToolboxSyncClient( - TOOLBOX_URL, - client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)} + # Initialize the toolset with Workload Identity (generates ID token for the URL) + toolset = ToolboxToolset( + server_url=TOOLBOX_URL, + credentials=CredentialStrategy.workload_identity(target_audience=TOOLBOX_URL) ) root_agent = Agent( name='root_agent', model='gemini-2.5-flash', instruction="You are a helpful AI assistant designed to provide accurate and useful information.", - tools=client.load_toolset(), + tools=[toolset], ) app = App(root_agent=root_agent, name="my_agent") @@ -131,14 +129,14 @@ app = App(root_agent=root_agent, name="my_agent") Run the deployment command: ```bash -make backend +make deploy ``` This command will build your agent's container image and deploy it to Vertex AI. ## Step 6: Test your Deployment -Once the deployment command (`make backend`) completes, it will output the URL +Once the deployment command (`make deploy`) completes, it will output the URL for the Agent Engine Playground. You can click on this URL to open the Playground in your browser and start chatting with your agent to test the tools. 
diff --git a/docs/en/how-to/generate_skill.md b/docs/en/how-to/generate_skill.md new file mode 100644 index 0000000000..7fa731e85b --- /dev/null +++ b/docs/en/how-to/generate_skill.md @@ -0,0 +1,112 @@ +--- +title: "Generate Agent Skills" +type: docs +weight: 10 +description: > + How to generate agent skills from a toolset. +--- + +The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset, complying with the [Agent Skill specification](https://agentskills.io/specification). + +## Before you begin + +1. Make sure you have the `toolbox` executable in your PATH. +2. Make sure you have [Node.js](https://nodejs.org/) installed on your system. + +## Generating a Skill from a Toolset + +A skill package consists of a `SKILL.md` file (with required YAML frontmatter) and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding Node.js script (`.js`) that works across different platforms (Linux, macOS, Windows). + + +### Command Usage + +The basic syntax for the command is: + +```bash +toolbox <source-flags> skills-generate \ + --name <skill-name> \ + --toolset <toolset-name> \ + --description <skill-description> \ + --output-dir <output-directory> +``` + +- `<source-flags>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details. +- `--name`: Name of the generated skill. +- `--description`: Description of the generated skill. +- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included. +- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
+ +{{< notice note >}} +**Note:** The `<skill-name>` must follow the Agent Skill [naming convention](https://agentskills.io/specification): it must contain only lowercase alphanumeric characters and hyphens, cannot start or end with a hyphen, and cannot contain consecutive hyphens (e.g., `my-skill`, `data-processing`). +{{< /notice >}} + +### Example: Custom Tools File + +1. Create a `tools.yaml` file with a toolset and some tools: + + ```yaml + tools: + tool_a: + description: "First tool" + run: + command: "echo 'Tool A'" + tool_b: + description: "Second tool" + run: + command: "echo 'Tool B'" + toolsets: + my_toolset: + tools: + - tool_a + - tool_b + ``` + +2. Generate the skill: + + ```bash + toolbox --tools-file tools.yaml skills-generate \ + --name "my-skill" \ + --toolset "my_toolset" \ + --description "A skill containing multiple tools" \ + --output-dir "generated-skills" + ``` + +3. The generated skill directory structure: + + ```text + generated-skills/ + └── my-skill/ + ├── SKILL.md + ├── assets/ + │ ├── tool_a.yaml + │ └── tool_b.yaml + └── scripts/ + ├── tool_a.js + └── tool_b.js + ``` + + In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset. + +### Example: Prebuilt Configuration + +You can also generate skills from prebuilt toolsets: + +```bash +toolbox --prebuilt alloydb-postgres-admin skills-generate \ + --name "alloydb-postgres-admin" \ + --description "skill for performing administrative operations on alloydb" +``` + +## Installing the Generated Skill in Gemini CLI + +Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command. + +### Installation Command + +Provide the path to the directory containing the generated skill: + +```bash +gemini skills install /path/to/generated-skills/my-skill +``` + +Alternatively, use `~/.gemini/skills` as the `--output-dir` to generate the skill directly into the Gemini CLI.
diff --git a/docs/en/how-to/invoke_tool.md b/docs/en/how-to/invoke_tool.md new file mode 100644 index 0000000000..4fc23d3a2c --- /dev/null +++ b/docs/en/how-to/invoke_tool.md @@ -0,0 +1,75 @@ +--- +title: "Invoke Tools via CLI" +type: docs +weight: 10 +description: > + Learn how to invoke your tools directly from the command line using the `invoke` command. +--- + +The `invoke` command allows you to invoke tools defined in your configuration directly from the CLI. This is useful for: + +- **Ephemeral Invocation:** Executing a tool without spinning up a full MCP server/client. +- **Debugging:** Isolating tool execution logic and testing with various parameter combinations. + +{{< notice tip >}} +**Keep configurations minimal:** The `invoke` command initializes *all* resources (sources, tools, etc.) defined in your configuration files during execution. To ensure fast response times, consider using a minimal configuration file containing only the tools you need for the specific invocation. +{{< /notice >}} + +## Before you begin + +1. Make sure you have the `toolbox` binary installed or built. +2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`). + +### Command Usage + +The basic syntax for the command is: + +```bash +toolbox <source-flags> invoke <tool-name> [params] +``` + +- `<source-flags>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details. +- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`. +- `[params]`: (Optional) A JSON string representing the arguments for the tool. + +## Examples + +### 1. Calling a Tool without Parameters + +If your tool takes no parameters, simply provide the tool name: + +```bash +toolbox --tools-file tools.yaml invoke my-simple-tool +``` + +### 2. Calling a Tool with Parameters + +For tools that require arguments, pass them as a JSON string. Ensure you escape quotes correctly for your shell.
+ +**Example: A tool that takes parameters** + +Assuming a tool named `mytool` taking `a` and `b`: + +```bash +toolbox --tools-file tools.yaml invoke mytool '{"a": 10, "b": 20}' +``` + +**Example: A tool that queries a database** + +```bash +toolbox --tools-file tools.yaml invoke db-query '{"sql": "SELECT * FROM users LIMIT 5"}' +``` + +### 3. Using Prebuilt Configurations + +You can also use the `--prebuilt` flag to load prebuilt toolsets. + +```bash +toolbox --prebuilt cloudsql-postgres invoke cloudsql-postgres-list-instances +``` + +## Troubleshooting + +- **Tool not found:** Ensure the `` matches exactly what is in your YAML file and that the file is correctly loaded via `--tools-file`. +- **Invalid parameters:** Double-check your JSON syntax. The error message will usually indicate if the JSON parsing failed or if the parameters didn't match the tool's schema. +- **Auth errors:** The `invoke` command currently does not support flows requiring client-side authorization (like OAuth flow initiation via the CLI). It works best for tools using service-side authentication (e.g., Application Default Credentials). diff --git a/docs/en/reference/cli.md b/docs/en/reference/cli.md index 686dbc0c73..11549c2830 100644 --- a/docs/en/reference/cli.md +++ b/docs/en/reference/cli.md @@ -27,9 +27,53 @@ description: > | | `--ui` | Launches the Toolbox UI web server. | | | | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` | | | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` | -| | `--user-agent-extra` | Appends additional metadata to the User-Agent. | | +| | `--user-agent-metadata` | Appends additional metadata to the User-Agent. | | | `-v` | `--version` | version for toolbox | | +## Sub Commands + +
+invoke + +Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup. + +**Syntax:** + +```bash +toolbox invoke [params] +``` + +**Arguments:** + +- `tool-name`: The name of the tool to execute (as defined in your configuration). +- `params`: (Optional) A JSON string containing the parameters for the tool. + +For more detailed instructions, see [Invoke Tools via CLI](../how-to/invoke_tool.md). + +
+ +
+skills-generate + +Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill. + +**Syntax:** + +```bash +toolbox skills-generate --name --description --toolset --output-dir +``` + +**Flags:** + +- `--name`: Name of the generated skill. +- `--description`: Description of the generated skill. +- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included. +- `--output-dir`: (Optional) Directory to output generated skills (default: "skills"). + +For more detailed instructions, see [Generate Agent Skills](../how-to/generate_skill.md). + +
+ ## Examples ### Transport Configuration diff --git a/docs/en/reference/prebuilt-tools.md b/docs/en/reference/prebuilt-tools.md index a539af9fb2..7a52236dfa 100644 --- a/docs/en/reference/prebuilt-tools.md +++ b/docs/en/reference/prebuilt-tools.md @@ -54,9 +54,9 @@ See [Usage Examples](../reference/cli.md#examples). * `get_query_plan`: Generate the execution plan of a statement. * `list_views`: Lists views in the database from pg_views with a default limit of 50 rows. Returns schemaname, viewname and the ownername. - * `list_schemas`: Lists schemas in the database. - * `database_overview`: Fetches the current state of the PostgreSQL server. - * `list_triggers`: Lists triggers in the database. + * `list_schemas`: Lists schemas in the database. + * `database_overview`: Fetches the current state of the PostgreSQL server. + * `list_triggers`: Lists triggers in the database. * `list_indexes`: List available user indexes in a PostgreSQL database. * `list_sequences`: List sequences in a PostgreSQL database. * `list_publication_tables`: List publication tables in a PostgreSQL database. @@ -64,7 +64,7 @@ See [Usage Examples](../reference/cli.md#examples). * `list_pg_settings`: List configuration parameters for the PostgreSQL server. * `list_database_stats`: Lists the key performance and activity statistics for each database in the AlloyDB instance. - * `list_roles`: Lists all the user-created roles in PostgreSQL database. + * `list_roles`: Lists all the user-created roles in PostgreSQL database. ## AlloyDB Postgres Admin @@ -100,6 +100,43 @@ See [Usage Examples](../reference/cli.md#examples). (timeseries metrics) for queries running in an AlloyDB instance using a PromQL query. +## AlloyDB Omni + +* `--prebuilt` value: `alloydb-omni` +* **Environment Variables:** + * `ALLOYDB_OMNI_HOST`: (Optional) The hostname or IP address (Default: localhost). + * `ALLOYDB_OMNI_PORT`: (Optional) The port number (Default: 5432). 
+ * `ALLOYDB_OMNI_DATABASE`: The name of the database to connect to. + * `ALLOYDB_OMNI_USER`: The database username. + * `ALLOYDB_OMNI_PASSWORD`: (Optional) The password for the database user. + * `ALLOYDB_OMNI_QUERY_PARAMS`: (Optional) Connection query parameters. +* **Tools:** + * `execute_sql`: Executes a SQL query. + * `list_tables`: Lists tables in the database. + * `list_autovacuum_configurations`: Lists autovacuum configurations in the + database. + * `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations. + * `list_columnar_recommended_columns`: Lists columns that AlloyDB Omni recommends adding to the columnar engine. + * `list_memory_configurations`: Lists memory-related configurations in the + database. + * `list_top_bloated_tables`: List top bloated tables in the database. + * `list_replication_slots`: Lists replication slots in the database. + * `list_invalid_indexes`: Lists invalid indexes in the database. + * `get_query_plan`: Generate the execution plan of a statement. + * `list_views`: Lists views in the database from pg_views with a default + limit of 50 rows. Returns schemaname, viewname and the ownername. + * `list_schemas`: Lists schemas in the database. + * `database_overview`: Fetches the current state of the PostgreSQL server. + * `list_triggers`: Lists triggers in the database. + * `list_indexes`: List available user indexes in a PostgreSQL database. + * `list_sequences`: List sequences in a PostgreSQL database. + * `list_publication_tables`: List publication tables in a PostgreSQL database. + * `list_tablespaces`: Lists tablespaces in the database. + * `list_pg_settings`: List configuration parameters for the PostgreSQL server. + * `list_database_stats`: Lists the key performance and activity statistics for + each database in the AlloyDB instance. + * `list_roles`: Lists all the user-created roles in PostgreSQL database. 
+ ## BigQuery * `--prebuilt` value: `bigquery` @@ -243,9 +280,9 @@ See [Usage Examples](../reference/cli.md#examples). * `get_query_plan`: Generate the execution plan of a statement. * `list_views`: Lists views in the database from pg_views with a default limit of 50 rows. Returns schemaname, viewname and the ownername. - * `list_schemas`: Lists schemas in the database. - * `database_overview`: Fetches the current state of the PostgreSQL server. - * `list_triggers`: Lists triggers in the database. + * `list_schemas`: Lists schemas in the database. + * `database_overview`: Fetches the current state of the PostgreSQL server. + * `list_triggers`: Lists triggers in the database. * `list_indexes`: List available user indexes in a PostgreSQL database. * `list_sequences`: List sequences in a PostgreSQL database. * `list_publication_tables`: List publication tables in a PostgreSQL database. @@ -253,7 +290,7 @@ See [Usage Examples](../reference/cli.md#examples). * `list_pg_settings`: List configuration parameters for the PostgreSQL server. * `list_database_stats`: Lists the key performance and activity statistics for each database in the postgreSQL instance. - * `list_roles`: Lists all the user-created roles in PostgreSQL database. + * `list_roles`: Lists all the user-created roles in PostgreSQL database. ## Cloud SQL for PostgreSQL Observability @@ -377,10 +414,10 @@ See [Usage Examples](../reference/cli.md#examples). entries. * **Dataplex Editor** (`roles/dataplex.editor`) to modify entries. * **Tools:** - * `dataplex_search_entries`: Searches for entries in Dataplex Catalog. - * `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex + * `search_entries`: Searches for entries in Dataplex Catalog. + * `lookup_entry`: Retrieves a specific entry from Dataplex Catalog. - * `dataplex_search_aspect_types`: Finds aspect types relevant to the + * `search_aspect_types`: Finds aspect types relevant to the query. 
## Firestore @@ -451,6 +488,7 @@ See [Usage Examples](../reference/cli.md#examples). * `create_project_file`: Create a new LookML file. * `update_project_file`: Update an existing LookML file. * `delete_project_file`: Delete a LookML file. + * `validate_project`: Check the syntax of a LookML project. * `get_connections`: Get the available connections in a Looker instance. * `get_connection_schemas`: Get the available schemas in a connection. * `get_connection_databases`: Get the available databases in a connection. @@ -564,9 +602,9 @@ See [Usage Examples](../reference/cli.md#examples). * `get_query_plan`: Generate the execution plan of a statement. * `list_views`: Lists views in the database from pg_views with a default limit of 50 rows. Returns schemaname, viewname and the ownername. - * `list_schemas`: Lists schemas in the database. - * `database_overview`: Fetches the current state of the PostgreSQL server. - * `list_triggers`: Lists triggers in the database. + * `list_schemas`: Lists schemas in the database. + * `database_overview`: Fetches the current state of the PostgreSQL server. + * `list_triggers`: Lists triggers in the database. * `list_indexes`: List available user indexes in a PostgreSQL database. * `list_sequences`: List sequences in a PostgreSQL database. * `list_publication_tables`: List publication tables in a PostgreSQL database. @@ -574,7 +612,7 @@ See [Usage Examples](../reference/cli.md#examples). * `list_pg_settings`: List configuration parameters for the PostgreSQL server. * `list_database_stats`: Lists the key performance and activity statistics for each database in the PostgreSQL server. - * `list_roles`: Lists all the user-created roles in PostgreSQL database. + * `list_roles`: Lists all the user-created roles in PostgreSQL database. 
## Google Cloud Serverless for Apache Spark diff --git a/docs/en/resources/authServices/_index.md b/docs/en/resources/authServices/_index.md index 757b58a1a3..a40ce182d1 100644 --- a/docs/en/resources/authServices/_index.md +++ b/docs/en/resources/authServices/_index.md @@ -28,17 +28,19 @@ The following configurations are placed at the top level of a `tools.yaml` file. {{< notice tip >}} If you are accessing Toolbox with multiple applications, each application should register their own Client ID even if they use the same - "kind" of auth provider. + "type" of auth provider. {{< /notice >}} ```yaml -authServices: - my_auth_app_1: - kind: google - clientId: ${YOUR_CLIENT_ID_1} - my_auth_app_2: - kind: google - clientId: ${YOUR_CLIENT_ID_2} +kind: authServices +name: my_auth_app_1 +type: google +clientId: ${YOUR_CLIENT_ID_1} +--- +kind: authServices +name: my_auth_app_2 +type: google +clientId: ${YOUR_CLIENT_ID_2} ``` {{< notice tip >}} diff --git a/docs/en/resources/authServices/google.md b/docs/en/resources/authServices/google.md index b0950040d3..f44284731e 100644 --- a/docs/en/resources/authServices/google.md +++ b/docs/en/resources/authServices/google.md @@ -40,10 +40,10 @@ id-token][provided-claims] can be used for the parameter. ## Example ```yaml -authServices: - my-google-auth: - kind: google - clientId: ${YOUR_GOOGLE_CLIENT_ID} +kind: authServices +name: my-google-auth +type: google +clientId: ${YOUR_GOOGLE_CLIENT_ID} ``` {{< notice tip >}} @@ -55,5 +55,5 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|------------------------------------------------------------------| -| kind | string | true | Must be "google". | +| type | string | true | Must be "google". | | clientId | string | true | Client ID of your application from registering your application. 
| diff --git a/docs/en/resources/embeddingModels/_index.md b/docs/en/resources/embeddingModels/_index.md index d9da2b71c3..8fb867306f 100644 --- a/docs/en/resources/embeddingModels/_index.md +++ b/docs/en/resources/embeddingModels/_index.md @@ -54,12 +54,12 @@ ${ENV_NAME} instead of hardcoding your API keys into the configuration file. Define an embedding model in the `embeddingModels` section: ```yaml -embeddingModels: - gemini-model: # Name of the embedding model - kind: gemini - model: gemini-embedding-001 - apiKey: ${GOOGLE_API_KEY} - dimension: 768 +kind: embeddingModels +name: gemini-model # Name of the embedding model +type: gemini +model: gemini-embedding-001 +apiKey: ${GOOGLE_API_KEY} +dimension: 768 ``` ### Step 2 - Embed Tool Parameters @@ -68,38 +68,39 @@ Use the defined embedding model, embed your query parameters using the `embeddedBy` field. Only string-typed parameters can be embedded: ```yaml -tools: - # Vector ingestion tool - insert_embedding: - kind: postgres-sql - source: my-pg-instance - statement: | - INSERT INTO documents (content, embedding) - VALUES ($1, $2); - parameters: - - name: content - type: string - description: The raw text content to be stored in the database. - - name: vector_string - type: string - # This parameter is hidden from the LLM. - # It automatically copies the value from 'content' and embeds it. - valueFromParam: content - embeddedBy: gemini-model - - # Semantic search tool - search_embedding: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT id, content, embedding <-> $1 AS distance - FROM documents - ORDER BY distance LIMIT 1 - parameters: - - name: semantic_search_string - type: string - description: The search query that will be converted to a vector. 
- embeddedBy: gemini-model # refers to the name of a defined embedding model +# Vector ingestion tool +kind: tools +name: insert_embedding +type: postgres-sql +source: my-pg-instance +statement: | + INSERT INTO documents (content, embedding) + VALUES ($1, $2); +parameters: + - name: content + type: string + description: The raw text content to be stored in the database. + - name: vector_string + type: string + # This parameter is hidden from the LLM. + # It automatically copies the value from 'content' and embeds it. + valueFromParam: content + embeddedBy: gemini-model +--- +# Semantic search tool +kind: tools +name: search_embedding +type: postgres-sql +source: my-pg-instance +statement: | + SELECT id, content, embedding <-> $1 AS distance + FROM documents + ORDER BY distance LIMIT 1 +parameters: + - name: semantic_search_string + type: string + description: The search query that will be converted to a vector. + embeddedBy: gemini-model # refers to the name of a defined embedding model ``` ## Kinds of Embedding Models diff --git a/docs/en/resources/embeddingModels/gemini.md b/docs/en/resources/embeddingModels/gemini.md index 8fcf8cf358..7555c91f85 100644 --- a/docs/en/resources/embeddingModels/gemini.md +++ b/docs/en/resources/embeddingModels/gemini.md @@ -50,12 +50,12 @@ information. ## Example ```yaml -embeddingModels: - gemini-model: - kind: gemini - model: gemini-embedding-001 - apiKey: ${GOOGLE_API_KEY} - dimension: 768 +kind: embeddingModels +name: gemini-model +type: gemini +model: gemini-embedding-001 +apiKey: ${GOOGLE_API_KEY} +dimension: 768 ``` {{< notice tip >}} @@ -67,7 +67,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------------------------| -| kind | string | true | Must be `gemini`. | +| type | string | true | Must be `gemini`. 
| | model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). | | apiKey | string | false | Your API Key from Google AI Studio. | | dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). | diff --git a/docs/en/resources/prompts/_index.md b/docs/en/resources/prompts/_index.md index 5227ba8470..b62f8d17ae 100644 --- a/docs/en/resources/prompts/_index.md +++ b/docs/en/resources/prompts/_index.md @@ -16,14 +16,14 @@ can be sent to a Large Language Model (LLM). The Toolbox server implements the specification, allowing clients to discover and retrieve these prompts. ```yaml -prompts: - code_review: - description: "Asks the LLM to analyze code quality and suggest improvements." - messages: - - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" - arguments: - - name: "code" - description: "The code to review" +kind: prompts +name: code_review +description: "Asks the LLM to analyze code quality and suggest improvements." +messages: + - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" +arguments: + - name: "code" + description: "The code to review" ``` ## Prompt Schema @@ -31,7 +31,7 @@ prompts: | **field** | **type** | **required** | **description** | |-------------|--------------------------------|--------------|--------------------------------------------------------------------------| | description | string | No | A brief explanation of what the prompt does. | -| kind | string | No | The kind of prompt. Defaults to `"custom"`. | +| type | string | No | The type of prompt. Defaults to `"custom"`. | | messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. | | arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. 
| diff --git a/docs/en/resources/prompts/custom/_index.md b/docs/en/resources/prompts/custom/_index.md index 01eedafdbf..01a8db4ca9 100644 --- a/docs/en/resources/prompts/custom/_index.md +++ b/docs/en/resources/prompts/custom/_index.md @@ -17,14 +17,14 @@ Here is an example of a simple prompt that takes a single argument, code, and asks an LLM to review it. ```yaml -prompts: - code_review: - description: "Asks the LLM to analyze code quality and suggest improvements." - messages: - - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" - arguments: - - name: "code" - description: "The code to review" +kind: prompts +name: code_review +description: "Asks the LLM to analyze code quality and suggest improvements." +messages: + - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}" +arguments: + - name: "code" + description: "The code to review" ``` ### Multi-message prompt @@ -33,19 +33,19 @@ You can define prompts with multiple messages to set up more complex conversational contexts, like a role-playing scenario. ```yaml -prompts: - roleplay_scenario: - description: "Sets up a roleplaying scenario with initial messages." - arguments: - - name: "character" - description: "The character the AI should embody." - - name: "situation" - description: "The initial situation for the roleplay." - messages: - - role: "user" - content: "Let's roleplay. You are {{.character}}. The situation is: {{.situation}}" - - role: "assistant" - content: "Okay, I understand. I am ready. What happens next?" +kind: prompts +name: roleplay_scenario +description: "Sets up a roleplaying scenario with initial messages." +arguments: + - name: "character" + description: "The character the AI should embody." + - name: "situation" + description: "The initial situation for the roleplay." +messages: + - role: "user" + content: "Let's roleplay. You are {{.character}}. 
The situation is: {{.situation}}" + - role: "assistant" + content: "Okay, I understand. I am ready. What happens next?" ``` ## Reference @@ -54,7 +54,7 @@ prompts: | **field** | **type** | **required** | **description** | |-------------|--------------------------------|--------------|--------------------------------------------------------------------------| -| kind | string | No | The kind of prompt. Must be `"custom"`. | +| type | string | No | The type of prompt. Must be `"custom"`. | | description | string | No | A brief explanation of what the prompt does. | | messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. | | arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. | diff --git a/docs/en/resources/sources/_index.md b/docs/en/resources/sources/_index.md index a5f916726f..21d281f6a3 100644 --- a/docs/en/resources/sources/_index.md +++ b/docs/en/resources/sources/_index.md @@ -17,15 +17,15 @@ instead of hardcoding your secrets into the configuration file. 
{{< /notice >}} ```yaml -sources: - my-cloud-sql-source: - kind: cloud-sql-postgres - project: my-project-id - region: us-central1 - instance: my-instance-name - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} +kind: sources +name: my-cloud-sql-source +type: cloud-sql-postgres +project: my-project-id +region: us-central1 +instance: my-instance-name +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} ``` In implementation, each source is a different connection pool or client that used diff --git a/docs/en/resources/sources/alloydb-admin.md b/docs/en/resources/sources/alloydb-admin.md index cddff47533..0584994ab1 100644 --- a/docs/en/resources/sources/alloydb-admin.md +++ b/docs/en/resources/sources/alloydb-admin.md @@ -25,19 +25,20 @@ Authentication can be handled in two ways: ## Example ```yaml -sources: - my-alloydb-admin: - kind: alloy-admin - - my-oauth-alloydb-admin: - kind: alloydb-admin - useClientOAuth: true +kind: sources +name: my-alloydb-admin +type: alloydb-admin +--- +kind: sources +name: my-oauth-alloydb-admin +type: alloydb-admin +useClientOAuth: true ``` ## Reference | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "alloydb-admin". | +| type | string | true | Must be "alloydb-admin". | | defaultProject | string | false | The Google Cloud project ID to use for AlloyDB infrastructure tools. | | useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. 
| diff --git a/docs/en/resources/sources/alloydb-pg.md b/docs/en/resources/sources/alloydb-pg.md index 2fa502aaec..b7fe99c759 100644 --- a/docs/en/resources/sources/alloydb-pg.md +++ b/docs/en/resources/sources/alloydb-pg.md @@ -176,17 +176,17 @@ To connect using IAM authentication: ## Example ```yaml -sources: - my-alloydb-pg-source: - kind: alloydb-postgres - project: my-project-id - region: us-central1 - cluster: my-cluster - instance: my-instance - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # ipType: "public" +kind: sources +name: my-alloydb-pg-source +type: alloydb-postgres +project: my-project-id +region: us-central1 +cluster: my-cluster +instance: my-instance +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# ipType: "public" ``` {{< notice tip >}} @@ -194,11 +194,20 @@ Use environment variable replacement with the format ${ENV_NAME} instead of hardcoding your secrets into the configuration file. {{< /notice >}} +### Managed Connection Pooling + +Toolbox automatically supports [Managed Connection Pooling][alloydb-mcp]. If your AlloyDB instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency. + +The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [AlloyDB Managed Connection Pooling documentation][alloydb-mcp-docs]. + +[alloydb-mcp]: https://cloud.google.com/blog/products/databases/alloydb-managed-connection-pooling +[alloydb-mcp-docs]: https://cloud.google.com/alloydb/docs/configure-managed-connection-pooling + ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "alloydb-postgres". | +| type | string | true | Must be "alloydb-postgres". 
| | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). | | cluster | string | true | Name of the AlloyDB cluster (e.g. "my-cluster"). | diff --git a/docs/en/resources/sources/bigquery.md b/docs/en/resources/sources/bigquery.md index 8cb05ccbb8..b898f9f383 100644 --- a/docs/en/resources/sources/bigquery.md +++ b/docs/en/resources/sources/bigquery.md @@ -121,47 +121,47 @@ identity used has been granted the correct IAM permissions. Initialize a BigQuery source that uses ADC: ```yaml -sources: - my-bigquery-source: - kind: "bigquery" - project: "my-project-id" - # location: "US" # Optional: Specifies the location for query jobs. - # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed". - # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets. - # - "my_dataset_1" - # - "other_project.my_dataset_2" - # impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate - # scopes: # Optional: List of OAuth scopes to request. - # - "https://www.googleapis.com/auth/bigquery" - # - "https://www.googleapis.com/auth/drive.readonly" - # maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50. +kind: sources +name: my-bigquery-source +type: "bigquery" +project: "my-project-id" +# location: "US" # Optional: Specifies the location for query jobs. +# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed". +# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets. +# - "my_dataset_1" +# - "other_project.my_dataset_2" +# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate +# scopes: # Optional: List of OAuth scopes to request. 
+# - "https://www.googleapis.com/auth/bigquery" +# - "https://www.googleapis.com/auth/drive.readonly" +# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50. ``` Initialize a BigQuery source that uses the client's access token: ```yaml -sources: - my-bigquery-client-auth-source: - kind: "bigquery" - project: "my-project-id" - useClientOAuth: true - # location: "US" # Optional: Specifies the location for query jobs. - # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed". - # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets. - # - "my_dataset_1" - # - "other_project.my_dataset_2" - # impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate - # scopes: # Optional: List of OAuth scopes to request. - # - "https://www.googleapis.com/auth/bigquery" - # - "https://www.googleapis.com/auth/drive.readonly" - # maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50. +kind: sources +name: my-bigquery-client-auth-source +type: "bigquery" +project: "my-project-id" +useClientOAuth: true +# location: "US" # Optional: Specifies the location for query jobs. +# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed". +# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets. +# - "my_dataset_1" +# - "other_project.my_dataset_2" +# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate +# scopes: # Optional: List of OAuth scopes to request. +# - "https://www.googleapis.com/auth/bigquery" +# - "https://www.googleapis.com/auth/drive.readonly" +# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50. 
``` ## Reference | **field** | **type** | **required** | **description** | |---------------------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery". | +| type | string | true | Must be "bigquery". | | project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. | | location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) | | writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. 
For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. | diff --git a/docs/en/resources/sources/bigtable.md b/docs/en/resources/sources/bigtable.md index e05653ac05..7e4207e9c5 100644 --- a/docs/en/resources/sources/bigtable.md +++ b/docs/en/resources/sources/bigtable.md @@ -59,17 +59,17 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -sources: - my-bigtable-source: - kind: "bigtable" - project: "my-project-id" - instance: "test-instance" +kind: sources +name: my-bigtable-source +type: "bigtable" +project: "my-project-id" +instance: "test-instance" ``` ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|-------------------------------------------------------------------------------| -| kind | string | true | Must be "bigtable". | +| type | string | true | Must be "bigtable". | | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | instance | string | true | Name of the Bigtable instance. | diff --git a/docs/en/resources/sources/cassandra.md b/docs/en/resources/sources/cassandra.md index 6a149718b4..1009a2bce0 100644 --- a/docs/en/resources/sources/cassandra.md +++ b/docs/en/resources/sources/cassandra.md @@ -23,19 +23,19 @@ distributed architectures, and a flexible approach to schema definition. 
## Example ```yaml -sources: - my-cassandra-source: - kind: cassandra - hosts: - - 127.0.0.1 - keyspace: my_keyspace - protoVersion: 4 - username: ${USER_NAME} - password: ${PASSWORD} - caPath: /path/to/ca.crt # Optional: path to CA certificate - certPath: /path/to/client.crt # Optional: path to client certificate - keyPath: /path/to/client.key # Optional: path to client key - enableHostVerification: true # Optional: enable host verification +kind: sources +name: my-cassandra-source +type: cassandra +hosts: + - 127.0.0.1 +keyspace: my_keyspace +protoVersion: 4 +username: ${USER_NAME} +password: ${PASSWORD} +caPath: /path/to/ca.crt # Optional: path to CA certificate +certPath: /path/to/client.crt # Optional: path to client certificate +keyPath: /path/to/client.key # Optional: path to client key +enableHostVerification: true # Optional: enable host verification ``` {{< notice tip >}} @@ -47,7 +47,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |------------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cassandra". | +| type | string | true | Must be "cassandra". | | hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. | | keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). | | protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). 
| diff --git a/docs/en/resources/sources/clickhouse.md b/docs/en/resources/sources/clickhouse.md index 2442899ce5..378e52b22c 100644 --- a/docs/en/resources/sources/clickhouse.md +++ b/docs/en/resources/sources/clickhouse.md @@ -46,31 +46,31 @@ ClickHouse supports multiple protocols: ### Secure Connection Example ```yaml -sources: - secure-clickhouse-source: - kind: clickhouse - host: clickhouse.example.com - port: "8443" - database: analytics - user: ${CLICKHOUSE_USER} - password: ${CLICKHOUSE_PASSWORD} - protocol: https - secure: true +kind: sources +name: secure-clickhouse-source +type: clickhouse +host: clickhouse.example.com +port: "8443" +database: analytics +user: ${CLICKHOUSE_USER} +password: ${CLICKHOUSE_PASSWORD} +protocol: https +secure: true ``` ### HTTP Protocol Example ```yaml -sources: - http-clickhouse-source: - kind: clickhouse - host: localhost - port: "8123" - database: logs - user: ${CLICKHOUSE_USER} - password: ${CLICKHOUSE_PASSWORD} - protocol: http - secure: false +kind: sources +name: http-clickhouse-source +type: clickhouse +host: localhost +port: "8123" +database: logs +user: ${CLICKHOUSE_USER} +password: ${CLICKHOUSE_PASSWORD} +protocol: http +secure: false ``` {{< notice tip >}} @@ -82,7 +82,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|-------------------------------------------------------------------------------------| -| kind | string | true | Must be "clickhouse". | +| type | string | true | Must be "clickhouse". | | host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") | | port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) | | database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). 
| diff --git a/docs/en/resources/sources/cloud-gda.md b/docs/en/resources/sources/cloud-gda.md index dc400f17e8..5d65df3cb7 100644 --- a/docs/en/resources/sources/cloud-gda.md +++ b/docs/en/resources/sources/cloud-gda.md @@ -20,21 +20,22 @@ Authentication can be handled in two ways: ## Example ```yaml -sources: - my-gda-source: - kind: cloud-gemini-data-analytics - projectId: my-project-id - - my-oauth-gda-source: - kind: cloud-gemini-data-analytics - projectId: my-project-id - useClientOAuth: true +kind: sources +name: my-gda-source +type: cloud-gemini-data-analytics +projectId: my-project-id +--- +kind: sources +name: my-oauth-gda-source +type: cloud-gemini-data-analytics +projectId: my-project-id +useClientOAuth: true ``` ## Reference | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "cloud-gemini-data-analytics". | +| type | string | true | Must be "cloud-gemini-data-analytics". | | projectId | string | true | The Google Cloud Project ID where the API is enabled. | | useClientOAuth | boolean | false | If true, the source uses the token provided by the caller (forwarded to the API). Otherwise, it uses server-side Application Default Credentials (ADC). Defaults to `false`. | diff --git a/docs/en/resources/sources/cloud-healthcare.md b/docs/en/resources/sources/cloud-healthcare.md index 117079688a..b1bf3d6c35 100644 --- a/docs/en/resources/sources/cloud-healthcare.md +++ b/docs/en/resources/sources/cloud-healthcare.md @@ -123,41 +123,41 @@ identity used has been granted the correct IAM permissions. 
Initialize a Cloud Healthcare API source that uses ADC: ```yaml -sources: - my-healthcare-source: - kind: "cloud-healthcare" - project: "my-project-id" - region: "us-central1" - dataset: "my-healthcare-dataset-id" - # allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs. - # - "my_fhir_store_1" - # allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs. - # - "my_dicom_store_1" - # - "my_dicom_store_2" +kind: sources +name: my-healthcare-source +type: "cloud-healthcare" +project: "my-project-id" +region: "us-central1" +dataset: "my-healthcare-dataset-id" +# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs. +# - "my_fhir_store_1" +# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs. +# - "my_dicom_store_1" +# - "my_dicom_store_2" ``` Initialize a Cloud Healthcare API source that uses the client's access token: ```yaml -sources: - my-healthcare-client-auth-source: - kind: "cloud-healthcare" - project: "my-project-id" - region: "us-central1" - dataset: "my-healthcare-dataset-id" - useClientOAuth: true - # allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs. - # - "my_fhir_store_1" - # allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs. - # - "my_dicom_store_1" - # - "my_dicom_store_2" +kind: sources +name: my-healthcare-client-auth-source +type: "cloud-healthcare" +project: "my-project-id" +region: "us-central1" +dataset: "my-healthcare-dataset-id" +useClientOAuth: true +# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs. +# - "my_fhir_store_1" +# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs. 
+# - "my_dicom_store_1" +# - "my_dicom_store_2" ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare". | +| type | string | true | Must be "cloud-healthcare". | | project | string | true | ID of the GCP project that the dataset lives in. | | region | string | true | Specifies the region (e.g., 'us', 'asia-northeast1') of the healthcare dataset. [Learn More](https://cloud.google.com/healthcare-api/docs/regions) | | dataset | string | true | ID of the healthcare dataset. | diff --git a/docs/en/resources/sources/cloud-logging-admin.md b/docs/en/resources/sources/cloud-logging-admin.md new file mode 100644 index 0000000000..199352ef6e --- /dev/null +++ b/docs/en/resources/sources/cloud-logging-admin.md @@ -0,0 +1,71 @@ +--- +title: "Cloud Logging Admin" +type: docs +weight: 1 +description: > + The Cloud Logging Admin source enables tools to interact with the Cloud Logging API, allowing for the retrieval of log names, monitored resource types, and the querying of log data. +--- + +## About + +The Cloud Logging Admin source provides a client to interact with the [Google +Cloud Logging API](https://cloud.google.com/logging/docs). This allows tools to list log names, monitored resource types, and query log entries. + +Authentication can be handled in two ways: + +1. **Application Default Credentials (ADC):** By default, the source uses ADC + to authenticate with the API. +2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will + expect an OAuth 2.0 access token to be provided by the client (e.g., a web + browser) for each request. 
+ +## Available Tools + +- [`cloud-logging-admin-list-log-names`](../tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md) + Lists the log names in the project. + +- [`cloud-logging-admin-list-resource-types`](../tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md) + Lists the monitored resource types. + +- [`cloud-logging-admin-query-logs`](../tools/cloudloggingadmin/cloud-logging-admin-query-logs.md) + Queries log entries. + +## Example + +Initialize a Cloud Logging Admin source that uses ADC: + +```yaml +kind: sources +name: my-cloud-logging +type: cloud-logging-admin +project: my-project-id +``` + +Initialize a Cloud Logging Admin source that uses client-side OAuth: + +```yaml +kind: sources +name: my-oauth-cloud-logging +type: cloud-logging-admin +project: my-project-id +useClientOAuth: true +``` + +Initialize a Cloud Logging Admin source that uses service account impersonation: + +```yaml +kind: sources +name: my-impersonated-cloud-logging +type: cloud-logging-admin +project: my-project-id +impersonateServiceAccount: "my-service-account@my-project.iam.gserviceaccount.com" +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-----------------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| type | string | true | Must be "cloud-logging-admin". | +| project | string | true | ID of the GCP project. | +| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. Cannot be used with `impersonateServiceAccount`. | +| impersonateServiceAccount | string | false | The service account to impersonate for API calls. Cannot be used with `useClientOAuth`. 
| diff --git a/docs/en/resources/sources/cloud-monitoring.md b/docs/en/resources/sources/cloud-monitoring.md index 84b700721f..dc4847549a 100644 --- a/docs/en/resources/sources/cloud-monitoring.md +++ b/docs/en/resources/sources/cloud-monitoring.md @@ -25,18 +25,19 @@ Authentication can be handled in two ways: ## Example ```yaml -sources: - my-cloud-monitoring: - kind: cloud-monitoring - - my-oauth-cloud-monitoring: - kind: cloud-monitoring - useClientOAuth: true +kind: sources +name: my-cloud-monitoring +type: cloud-monitoring +--- +kind: sources +name: my-oauth-cloud-monitoring +type: cloud-monitoring +useClientOAuth: true ``` ## Reference | **field** | **type** | **required** | **description** | |----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cloud-monitoring". | +| type | string | true | Must be "cloud-monitoring". | | useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. 
| diff --git a/docs/en/resources/sources/cloud-sql-admin.md b/docs/en/resources/sources/cloud-sql-admin.md index e66448725e..51462e982d 100644 --- a/docs/en/resources/sources/cloud-sql-admin.md +++ b/docs/en/resources/sources/cloud-sql-admin.md @@ -24,19 +24,20 @@ Authentication can be handled in two ways: ## Example ```yaml -sources: - my-cloud-sql-admin: - kind: cloud-sql-admin - - my-oauth-cloud-sql-admin: - kind: cloud-sql-admin - useClientOAuth: true +kind: sources +name: my-cloud-sql-admin +type: cloud-sql-admin +--- +kind: sources +name: my-oauth-cloud-sql-admin +type: cloud-sql-admin +useClientOAuth: true ``` ## Reference | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "cloud-sql-admin". | +| type | string | true | Must be "cloud-sql-admin". | | defaultProject | string | false | The Google Cloud project ID to use for Cloud SQL infrastructure tools. | | useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. | diff --git a/docs/en/resources/sources/cloud-sql-mssql.md b/docs/en/resources/sources/cloud-sql-mssql.md index 9477e44c93..ef3ce3cebe 100644 --- a/docs/en/resources/sources/cloud-sql-mssql.md +++ b/docs/en/resources/sources/cloud-sql-mssql.md @@ -87,16 +87,16 @@ Currently, this source only uses standard authentication. 
You will need to ## Example ```yaml -sources: - my-cloud-sql-mssql-instance: - kind: cloud-sql-mssql - project: my-project - region: my-region - instance: my-instance - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # ipType: private +kind: sources +name: my-cloud-sql-mssql-instance +type: cloud-sql-mssql +project: my-project +region: my-region +instance: my-instance +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# ipType: private ``` {{< notice tip >}} @@ -108,7 +108,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cloud-sql-mssql". | +| type | string | true | Must be "cloud-sql-mssql". | | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). | | instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). 
| diff --git a/docs/en/resources/sources/cloud-sql-mysql.md b/docs/en/resources/sources/cloud-sql-mysql.md index e9f89f22a9..ba0c1b1f26 100644 --- a/docs/en/resources/sources/cloud-sql-mysql.md +++ b/docs/en/resources/sources/cloud-sql-mysql.md @@ -128,16 +128,16 @@ To connect using IAM authentication: ## Example ```yaml -sources: - my-cloud-sql-mysql-source: - kind: cloud-sql-mysql - project: my-project-id - region: us-central1 - instance: my-instance - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # ipType: "private" +kind: sources +name: my-cloud-sql-mysql-source +type: cloud-sql-mysql +project: my-project-id +region: us-central1 +instance: my-instance +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# ipType: "private" ``` {{< notice tip >}} @@ -149,7 +149,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cloud-sql-mysql". | +| type | string | true | Must be "cloud-sql-mysql". | | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). | | instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). 
| diff --git a/docs/en/resources/sources/cloud-sql-pg.md b/docs/en/resources/sources/cloud-sql-pg.md index 2ecdcee6ac..182b54e914 100644 --- a/docs/en/resources/sources/cloud-sql-pg.md +++ b/docs/en/resources/sources/cloud-sql-pg.md @@ -178,16 +178,16 @@ To connect using IAM authentication: ## Example ```yaml -sources: - my-cloud-sql-pg-source: - kind: cloud-sql-postgres - project: my-project-id - region: us-central1 - instance: my-instance - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # ipType: "private" +kind: sources +name: my-cloud-sql-pg-source +type: cloud-sql-postgres +project: my-project-id +region: us-central1 +instance: my-instance +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# ipType: "private" ``` {{< notice tip >}} @@ -195,11 +195,20 @@ Use environment variable replacement with the format ${ENV_NAME} instead of hardcoding your secrets into the configuration file. {{< /notice >}} +### Managed Connection Pooling + +Toolbox automatically supports [Managed Connection Pooling][csql-mcp]. If your Cloud SQL for PostgreSQL instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency. + +The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [Cloud SQL Managed Connection Pooling documentation][csql-mcp-docs]. + +[csql-mcp]: https://docs.cloud.google.com/sql/docs/postgres/managed-connection-pooling +[csql-mcp-docs]: https://docs.cloud.google.com/sql/docs/postgres/configure-mcp + ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cloud-sql-postgres". | +| type | string | true | Must be "cloud-sql-postgres". 
| | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). | | instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). | diff --git a/docs/en/resources/sources/cockroachdb.md b/docs/en/resources/sources/cockroachdb.md new file mode 100644 index 0000000000..9ecf884ce5 --- /dev/null +++ b/docs/en/resources/sources/cockroachdb.md @@ -0,0 +1,242 @@ +--- +title: "CockroachDB" +type: docs +weight: 1 +description: > + CockroachDB is a distributed SQL database built for cloud applications. + +--- + +## About + +[CockroachDB][crdb-docs] is a distributed SQL database designed for cloud-native applications. It provides strong consistency, horizontal scalability, and built-in resilience with automatic failover and recovery. CockroachDB uses the PostgreSQL wire protocol, making it compatible with many PostgreSQL tools and drivers while providing unique features like multi-region deployments and distributed transactions. + +**Minimum Version:** CockroachDB v25.1 or later is recommended for full tool compatibility. + +[crdb-docs]: https://www.cockroachlabs.com/docs/ + +## Available Tools + +- [`cockroachdb-sql`](../tools/cockroachdb/cockroachdb-sql.md) + Execute SQL queries as prepared statements in CockroachDB (alias for execute-sql). + +- [`cockroachdb-execute-sql`](../tools/cockroachdb/cockroachdb-execute-sql.md) + Run parameterized SQL statements in CockroachDB. + +- [`cockroachdb-list-schemas`](../tools/cockroachdb/cockroachdb-list-schemas.md) + List schemas in a CockroachDB database. + +- [`cockroachdb-list-tables`](../tools/cockroachdb/cockroachdb-list-tables.md) + List tables in a CockroachDB database. + +## Requirements + +### Database User + +This source uses standard authentication. You will need to [create a CockroachDB user][crdb-users] to login to the database with. 
For CockroachDB Cloud deployments, SSL/TLS is required. + +[crdb-users]: https://www.cockroachlabs.com/docs/stable/create-user.html + +### SSL/TLS Configuration + +CockroachDB Cloud clusters require SSL/TLS connections. Use the `queryParams` section to configure SSL settings: + +- **For CockroachDB Cloud**: Use `sslmode: require` at minimum +- **For self-hosted with certificates**: Use `sslmode: verify-full` with certificate paths +- **For local development only**: Use `sslmode: disable` (not recommended for production) + +## Example + +```yaml +sources: + my_cockroachdb: + type: cockroachdb + host: your-cluster.cockroachlabs.cloud + port: "26257" + user: myuser + password: mypassword + database: defaultdb + maxRetries: 5 + retryBaseDelay: 500ms + queryParams: + sslmode: require + application_name: my-app + + # MCP Security Settings (recommended for production) + readOnlyMode: true # Read-only by default (MCP best practice) + enableWriteMode: false # Set to true to allow write operations + maxRowLimit: 1000 # Limit query results + queryTimeoutSec: 30 # Prevent long-running queries + enableTelemetry: true # Enable observability + telemetryVerbose: false # Set true for detailed logs + clusterID: "my-cluster" # Optional identifier + +tools: + list_expenses: + type: cockroachdb-sql + source: my_cockroachdb + description: List all expenses + statement: SELECT id, description, amount, category FROM expenses WHERE user_id = $1 + parameters: + - name: user_id + type: string + description: The user's ID + + list_tables: + type: cockroachdb-list-tables + source: my_cockroachdb + description: List tables in the database + + list_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: List schemas in the database +``` + +## Configuration Parameters + +### Required Parameters + +| Parameter | Type | Description | +|-----------|------|-------------| +| `type` | string | Must be `cockroachdb` | +| `host` | string | The
hostname or IP address of the CockroachDB cluster | +| `port` | string | The port number (typically "26257") | +| `user` | string | The database user name | +| `database` | string | The database name to connect to | + +### Optional Parameters + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `password` | string | "" | The database password (can be empty for certificate-based auth) | +| `maxRetries` | integer | 5 | Maximum number of connection retry attempts | +| `retryBaseDelay` | string | "500ms" | Base delay between retry attempts (exponential backoff) | +| `queryParams` | map | {} | Additional connection parameters (e.g., SSL configuration) | + +### MCP Security Parameters + +CockroachDB integration includes security features following the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) specification: + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `readOnlyMode` | boolean | true | Enables read-only mode by default (MCP requirement) | +| `enableWriteMode` | boolean | false | Explicitly enable write operations (INSERT/UPDATE/DELETE/CREATE/DROP) | +| `maxRowLimit` | integer | 1000 | Maximum rows returned per SELECT query (auto-adds LIMIT clause) | +| `queryTimeoutSec` | integer | 30 | Query timeout in seconds to prevent long-running queries | +| `enableTelemetry` | boolean | true | Enable structured logging of tool invocations | +| `telemetryVerbose` | boolean | false | Enable detailed JSON telemetry output | +| `clusterID` | string | "" | Optional cluster identifier for telemetry | + +### Query Parameters + +Common query parameters for CockroachDB connections: + +| Parameter | Values | Description | +|-----------|--------|-------------| +| `sslmode` | `disable`, `require`, `verify-ca`, `verify-full` | SSL/TLS mode (CockroachDB Cloud requires `require` or higher) | +| `sslrootcert` | file path | Path to root certificate for SSL verification | +| 
`sslcert` | file path | Path to client certificate | +| `sslkey` | file path | Path to client key | +| `application_name` | string | Application name for connection tracking | + +## Best Practices + +### Security and MCP Compliance + +**Read-Only by Default**: The integration follows MCP best practices by defaulting to read-only mode. This prevents accidental data modifications: + +```yaml +sources: + my_cockroachdb: + readOnlyMode: true # Default behavior + enableWriteMode: false # Explicit write opt-in required +``` + +To enable write operations: + +```yaml +sources: + my_cockroachdb: + readOnlyMode: false # Disable read-only protection + enableWriteMode: true # Explicitly allow writes +``` + +**Query Limits**: Automatic row limits prevent excessive data retrieval: +- SELECT queries automatically get `LIMIT 1000` appended (configurable via `maxRowLimit`) +- Queries are terminated after 30 seconds (configurable via `queryTimeoutSec`) + +**Observability**: Structured telemetry provides visibility into tool usage: +- Tool invocations are logged with status, latency, and row counts +- SQL queries are redacted to protect sensitive values +- Set `telemetryVerbose: true` for detailed JSON logs + +### Use UUID Primary Keys + +CockroachDB performs best with UUID primary keys rather than sequential integers to avoid transaction hotspots: + +```sql +CREATE TABLE expenses ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + description TEXT, + amount DECIMAL(10,2) +); +``` + +### Automatic Transaction Retry + +This source uses the official `cockroach-go/v2` library which provides automatic transaction retry for serialization conflicts. For write operations requiring explicit transaction control, tools can use the `ExecuteTxWithRetry` method. + +### Multi-Region Deployments + +CockroachDB supports multi-region deployments with automatic data distribution. Configure your cluster's regions and survival goals separately from the Toolbox configuration. 
The source will connect to any node in the cluster. + +### Connection Pooling + +The source maintains a connection pool to the CockroachDB cluster. The pool automatically handles: +- Load balancing across cluster nodes +- Connection retry with exponential backoff +- Health checking of connections + +## Troubleshooting + +### SSL/TLS Errors + +If you encounter "server requires encryption" errors: + +1. For CockroachDB Cloud, ensure `sslmode` is set to `require` or higher: + ```yaml + queryParams: + sslmode: require + ``` + +2. For certificate verification, download your cluster's root certificate and configure: + ```yaml + queryParams: + sslmode: verify-full + sslrootcert: /path/to/ca.crt + ``` + +### Connection Timeouts + +If experiencing connection timeouts: + +1. Check network connectivity to the CockroachDB cluster +2. Verify firewall rules allow connections on port 26257 +3. For CockroachDB Cloud, ensure IP allowlisting is configured +4. Increase `maxRetries` or `retryBaseDelay` if needed + +### Transaction Retry Errors + +CockroachDB may encounter serializable transaction conflicts. The integration automatically handles these retries using the cockroach-go library. If you see retry-related errors, check: + +1. Database load and contention +2. Query patterns that might cause conflicts +3. 
Consider using `SELECT FOR UPDATE` for explicit locking + +## Additional Resources + +- [CockroachDB Documentation](https://www.cockroachlabs.com/docs/) +- [CockroachDB Best Practices](https://www.cockroachlabs.com/docs/stable/performance-best-practices-overview.html) +- [Multi-Region Capabilities](https://www.cockroachlabs.com/docs/stable/multiregion-overview.html) +- [Connection Parameters](https://www.cockroachlabs.com/docs/stable/connection-parameters.html) diff --git a/docs/en/resources/sources/couchbase.md b/docs/en/resources/sources/couchbase.md index b571d11cfe..18703f68b8 100644 --- a/docs/en/resources/sources/couchbase.md +++ b/docs/en/resources/sources/couchbase.md @@ -19,14 +19,14 @@ allowing tools to execute SQL queries against it. ## Example ```yaml -sources: - my-couchbase-instance: - kind: couchbase - connectionString: couchbase://localhost - bucket: travel-sample - scope: inventory - username: Administrator - password: password +kind: sources +name: my-couchbase-instance +type: couchbase +connectionString: couchbase://localhost +bucket: travel-sample +scope: inventory +username: Administrator +password: password ``` {{< notice note >}} @@ -38,7 +38,7 @@ Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connect | **field** | **type** | **required** | **description** | |----------------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "couchbase". | +| type | string | true | Must be "couchbase". | | connectionString | string | true | Connection string for the Couchbase cluster. 
| | bucket | string | true | Name of the bucket to connect to. | | scope | string | true | Name of the scope within the bucket. | diff --git a/docs/en/resources/sources/dataplex.md b/docs/en/resources/sources/dataplex.md index 828ee5b698..52c44166f5 100644 --- a/docs/en/resources/sources/dataplex.md +++ b/docs/en/resources/sources/dataplex.md @@ -23,10 +23,10 @@ applying artificial intelligence and machine learning. ## Example ```yaml -sources: - my-dataplex-source: - kind: "dataplex" - project: "my-project-id" +kind: sources +name: my-dataplex-source +type: "dataplex" +project: "my-project-id" ``` ## Sample System Prompt @@ -355,5 +355,5 @@ This abbreviated syntax works for the qualified predicates except for `label` in | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|----------------------------------------------------------------------------------| -| kind | string | true | Must be "dataplex". | +| type | string | true | Must be "dataplex". | | project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").| diff --git a/docs/en/resources/sources/dgraph.md b/docs/en/resources/sources/dgraph.md index 8ac82e03c4..e99721b73c 100644 --- a/docs/en/resources/sources/dgraph.md +++ b/docs/en/resources/sources/dgraph.md @@ -51,14 +51,14 @@ and user credentials for that namespace. ## Example ```yaml -sources: - my-dgraph-source: - kind: dgraph - dgraphUrl: https://xxxx.cloud.dgraph.io - user: ${USER_NAME} - password: ${PASSWORD} - apiKey: ${API_KEY} - namespace : 0 +kind: sources +name: my-dgraph-source +type: dgraph +dgraphUrl: https://xxxx.cloud.dgraph.io +user: ${USER_NAME} +password: ${PASSWORD} +apiKey: ${API_KEY} +namespace : 0 ``` {{< notice tip >}} @@ -70,7 +70,7 @@ instead of hardcoding your secrets into the configuration file. 
| **Field** | **Type** | **Required** | **Description** | |-------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "dgraph". | +| type | string | true | Must be "dgraph". | | dgraphUrl | string | true | Connection URI (e.g. "", ""). | | user | string | false | Name of the Dgraph user to connect as (e.g., "groot"). | | password | string | false | Password of the Dgraph user (e.g., "password"). | diff --git a/docs/en/resources/sources/elasticsearch.md b/docs/en/resources/sources/elasticsearch.md index d0451c16b1..57527e6800 100644 --- a/docs/en/resources/sources/elasticsearch.md +++ b/docs/en/resources/sources/elasticsearch.md @@ -59,18 +59,18 @@ applying permissions to an API key. ## Example ```yaml -sources: - my-elasticsearch-source: - kind: "elasticsearch" - addresses: - - "http://localhost:9200" - apikey: "my-api-key" +kind: sources +name: my-elasticsearch-source +type: "elasticsearch" +addresses: + - "http://localhost:9200" +apikey: "my-api-key" ``` ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------| -| kind | string | true | Must be "elasticsearch". | +| type | string | true | Must be "elasticsearch". | | addresses | []string | true | List of Elasticsearch hosts to connect to. | | apikey | string | true | The API key to use for authentication. | diff --git a/docs/en/resources/sources/firebird.md b/docs/en/resources/sources/firebird.md index 1c3a8a7696..fe7a1003a1 100644 --- a/docs/en/resources/sources/firebird.md +++ b/docs/en/resources/sources/firebird.md @@ -36,14 +36,14 @@ user][fb-users] to login to the database with. 
## Example ```yaml -sources: - my_firebird_db: - kind: firebird - host: "localhost" - port: 3050 - database: "/path/to/your/database.fdb" - user: ${FIREBIRD_USER} - password: ${FIREBIRD_PASS} +kind: sources +name: my_firebird_db +type: firebird +host: "localhost" +port: 3050 +database: "/path/to/your/database.fdb" +user: ${FIREBIRD_USER} +password: ${FIREBIRD_PASS} ``` {{< notice tip >}} @@ -55,7 +55,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|------------------------------------------------------------------------------| -| kind | string | true | Must be "firebird". | +| type | string | true | Must be "firebird". | | host | string | true | IP address to connect to (e.g. "127.0.0.1") | | port | string | true | Port to connect to (e.g. "3050") | | database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). | diff --git a/docs/en/resources/sources/firestore.md b/docs/en/resources/sources/firestore.md index 5fe3c875d1..d15a44507d 100644 --- a/docs/en/resources/sources/firestore.md +++ b/docs/en/resources/sources/firestore.md @@ -61,17 +61,17 @@ database named `(default)` will be used. ## Example ```yaml -sources: - my-firestore-source: - kind: "firestore" - project: "my-project-id" - # database: "my-database" # Optional, defaults to "(default)" +kind: sources +name: my-firestore-source +type: "firestore" +project: "my-project-id" +# database: "my-database" # Optional, defaults to "(default)" ``` ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "firestore". | +| type | string | true | Must be "firestore". | | project | string | true | Id of the GCP project that contains the Firestore database (e.g. 
"my-project-id"). | | database | string | false | Name of the Firestore database to connect to. Defaults to "(default)" if not specified. | diff --git a/docs/en/resources/sources/http.md b/docs/en/resources/sources/http.md index edb25002f8..cd840318b8 100644 --- a/docs/en/resources/sources/http.md +++ b/docs/en/resources/sources/http.md @@ -21,18 +21,18 @@ and other HTTP-accessible resources. ## Example ```yaml -sources: - my-http-source: - kind: http - baseUrl: https://api.example.com/data - timeout: 10s # default to 30s - headers: - Authorization: Bearer ${API_KEY} - Content-Type: application/json - queryParams: - param1: value1 - param2: value2 - # disableSslVerification: false +kind: sources +name: my-http-source +type: http +baseUrl: https://api.example.com/data +timeout: 10s # default to 30s +headers: + Authorization: Bearer ${API_KEY} + Content-Type: application/json +queryParams: + param1: value1 + param2: value2 +# disableSslVerification: false ``` {{< notice tip >}} @@ -44,7 +44,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |------------------------|:-----------------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "http". | +| type | string | true | Must be "http". | | baseUrl | string | true | The base URL for the HTTP requests (e.g., `https://api.example.com`). | | timeout | string | false | The timeout for HTTP requests (e.g., "5s", "1m", refer to [ParseDuration][parse-duration-doc] for more examples). Defaults to 30s. | | headers | map[string]string | false | Default headers to include in the HTTP requests. 
| diff --git a/docs/en/resources/sources/looker.md b/docs/en/resources/sources/looker.md index 75bebf37ea..d731f05138 100644 --- a/docs/en/resources/sources/looker.md +++ b/docs/en/resources/sources/looker.md @@ -56,16 +56,16 @@ To initialize the application default credential run `gcloud auth login ## Example ```yaml -sources: - my-looker-source: - kind: looker - base_url: http://looker.example.com - client_id: ${LOOKER_CLIENT_ID} - client_secret: ${LOOKER_CLIENT_SECRET} - project: ${LOOKER_PROJECT} - location: ${LOOKER_LOCATION} - verify_ssl: true - timeout: 600s +kind: sources +name: my-looker-source +type: looker +base_url: http://looker.example.com +client_id: ${LOOKER_CLIENT_ID} +client_secret: ${LOOKER_CLIENT_SECRET} +project: ${LOOKER_PROJECT} +location: ${LOOKER_LOCATION} +verify_ssl: true +timeout: 600s ``` The Looker base url will look like "https://looker.example.com", don't include @@ -93,7 +93,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |----------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "looker". | +| type | string | true | Must be "looker". | | base_url | string | true | The URL of your Looker server with no trailing /. | | client_id | string | false | The client id assigned by Looker. | | client_secret | string | false | The client secret assigned by Looker. | diff --git a/docs/en/resources/sources/mariadb.md b/docs/en/resources/sources/mariadb.md index c956274fde..67854f4a3d 100644 --- a/docs/en/resources/sources/mariadb.md +++ b/docs/en/resources/sources/mariadb.md @@ -45,18 +45,18 @@ MariaDB user][mariadb-users] to log in to the database. 
## Example ```yaml -sources: - my_mariadb_db: - kind: mysql - host: 127.0.0.1 - port: 3306 - database: my_db - user: ${MARIADB_USER} - password: ${MARIADB_PASS} - # Optional TLS and other driver parameters. For example, enable preferred TLS: - # queryParams: - # tls: preferred - queryTimeout: 30s # Optional: query timeout duration +kind: sources +name: my_mariadb_db +type: mysql +host: 127.0.0.1 +port: 3306 +database: my_db +user: ${MARIADB_USER} +password: ${MARIADB_PASS} +# Optional TLS and other driver parameters. For example, enable preferred TLS: +# queryParams: +# tls: preferred +queryTimeout: 30s # Optional: query timeout duration ``` {{< notice tip >}} @@ -68,7 +68,7 @@ Use environment variables instead of committing credentials to source files. | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- | -| kind | string | true | Must be `mysql`. | +| type | string | true | Must be `mysql`. | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "3307"). | | database | string | true | Name of the MariaDB database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/mindsdb.md b/docs/en/resources/sources/mindsdb.md index 5ea8f4a147..6ec7e57021 100644 --- a/docs/en/resources/sources/mindsdb.md +++ b/docs/en/resources/sources/mindsdb.md @@ -125,15 +125,15 @@ can omit the password field. 
## Example ```yaml -sources: - my-mindsdb-source: - kind: mindsdb - host: 127.0.0.1 - port: 3306 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} # Optional: omit if MindsDB is configured without authentication - queryTimeout: 30s # Optional: query timeout duration +kind: sources +name: my-mindsdb-source +type: mindsdb +host: 127.0.0.1 +port: 3306 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} # Optional: omit if MindsDB is configured without authentication +queryTimeout: 30s # Optional: query timeout duration ``` ### Working Configuration Example @@ -141,13 +141,13 @@ sources: Here's a working configuration that has been tested: ```yaml -sources: - my-pg-source: - kind: mindsdb - host: 127.0.0.1 - port: 47335 - database: files - user: mindsdb +kind: sources +name: my-pg-source +type: mindsdb +host: 127.0.0.1 +port: 47335 +database: files +user: mindsdb ``` {{< notice tip >}} @@ -176,7 +176,7 @@ With MindsDB integration, you can: | **field** | **type** | **required** | **description** | |--------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mindsdb". | +| type | string | true | Must be "mindsdb". | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "3306"). | | database | string | true | Name of the MindsDB database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/mongodb.md b/docs/en/resources/sources/mongodb.md index 91fbe23957..50e3a63dce 100644 --- a/docs/en/resources/sources/mongodb.md +++ b/docs/en/resources/sources/mongodb.md @@ -17,10 +17,10 @@ flexible, JSON-like documents, making it easy to develop and scale applications. 
## Example ```yaml -sources: - my-mongodb: - kind: mongodb - uri: "mongodb+srv://username:password@host.mongodb.net" +kind: sources +name: my-mongodb +type: mongodb +uri: "mongodb+srv://username:password@host.mongodb.net" ``` @@ -28,5 +28,5 @@ sources: | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|-------------------------------------------------------------------| -| kind | string | true | Must be "mongodb". | +| type | string | true | Must be "mongodb". | | uri | string | true | connection string to connect to MongoDB | diff --git a/docs/en/resources/sources/mssql.md b/docs/en/resources/sources/mssql.md index 424d781c33..e1b36228ad 100644 --- a/docs/en/resources/sources/mssql.md +++ b/docs/en/resources/sources/mssql.md @@ -39,15 +39,15 @@ SQL Server user][mssql-users] to login to the database with. ## Example ```yaml -sources: - my-mssql-source: - kind: mssql - host: 127.0.0.1 - port: 1433 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # encrypt: strict +kind: sources +name: my-mssql-source +type: mssql +host: 127.0.0.1 +port: 1433 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# encrypt: strict ``` {{< notice tip >}} @@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mssql". | +| type | string | true | Must be "mssql". | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "1433"). | | database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). 
| diff --git a/docs/en/resources/sources/mysql.md b/docs/en/resources/sources/mysql.md index 95f2b96d7c..ee294bc8ff 100644 --- a/docs/en/resources/sources/mysql.md +++ b/docs/en/resources/sources/mysql.md @@ -49,18 +49,18 @@ MySQL user][mysql-users] to login to the database with. ## Example ```yaml -sources: - my-mysql-source: - kind: mysql - host: 127.0.0.1 - port: 3306 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - # Optional TLS and other driver parameters. For example, enable preferred TLS: - # queryParams: - # tls: preferred - queryTimeout: 30s # Optional: query timeout duration +kind: sources +name: my-mysql-source +type: mysql +host: 127.0.0.1 +port: 3306 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +# Optional TLS and other driver parameters. For example, enable preferred TLS: +# queryParams: +# tls: preferred +queryTimeout: 30s # Optional: query timeout duration ``` {{< notice tip >}} @@ -72,7 +72,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "mysql". | +| type | string | true | Must be "mysql". | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "3306"). | | database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/neo4j.md b/docs/en/resources/sources/neo4j.md index 223915c337..0400ea25d5 100644 --- a/docs/en/resources/sources/neo4j.md +++ b/docs/en/resources/sources/neo4j.md @@ -33,13 +33,13 @@ user if available. 
## Example ```yaml -sources: - my-neo4j-source: - kind: neo4j - uri: neo4j+s://xxxx.databases.neo4j.io:7687 - user: ${USER_NAME} - password: ${PASSWORD} - database: "neo4j" +kind: sources +name: my-neo4j-source +type: neo4j +uri: neo4j+s://xxxx.databases.neo4j.io:7687 +user: ${USER_NAME} +password: ${PASSWORD} +database: "neo4j" ``` {{< notice tip >}} @@ -51,7 +51,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|----------------------------------------------------------------------| -| kind | string | true | Must be "neo4j". | +| type | string | true | Must be "neo4j". | | uri | string | true | Connect URI ("bolt://localhost", "neo4j+s://xxx.databases.neo4j.io") | | user | string | true | Name of the Neo4j user to connect as (e.g. "neo4j"). | | password | string | true | Password of the Neo4j user (e.g. "my-password"). | diff --git a/docs/en/resources/sources/oceanbase.md b/docs/en/resources/sources/oceanbase.md index b26b46b8be..24e24f5bfe 100644 --- a/docs/en/resources/sources/oceanbase.md +++ b/docs/en/resources/sources/oceanbase.md @@ -33,15 +33,15 @@ with SSL). ## Example ```yaml -sources: - my-oceanbase-source: - kind: oceanbase - host: 127.0.0.1 - port: 2881 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - queryTimeout: 30s # Optional: query timeout duration +kind: sources +name: my-oceanbase-source +type: oceanbase +host: 127.0.0.1 +port: 2881 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +queryTimeout: 30s # Optional: query timeout duration ``` {{< notice tip >}} @@ -53,7 +53,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: |-------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "oceanbase". 
| +| type | string | true | Must be "oceanbase". | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "2881"). | | database | string | true | Name of the OceanBase database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/oracle.md b/docs/en/resources/sources/oracle.md index 51fa18fe13..8778838cf6 100644 --- a/docs/en/resources/sources/oracle.md +++ b/docs/en/resources/sources/oracle.md @@ -90,27 +90,27 @@ using a TNS (Transparent Network Substrate) alias. This example demonstrates the four connection methods you could choose from: ```yaml -sources: - my-oracle-source: - kind: oracle - - # --- Choose one connection method --- - # 1. Host, Port, and Service Name - host: 127.0.0.1 - port: 1521 - serviceName: XEPDB1 +kind: sources +name: my-oracle-source +type: oracle - # 2. Direct Connection String - connectionString: "127.0.0.1:1521/XEPDB1" +# --- Choose one connection method --- +# 1. Host, Port, and Service Name +host: 127.0.0.1 +port: 1521 +serviceName: XEPDB1 - # 3. TNS Alias (requires tnsnames.ora) - tnsAlias: "MY_DB_ALIAS" - tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var +# 2. Direct Connection String +connectionString: "127.0.0.1:1521/XEPDB1" - user: ${USER_NAME} - password: ${PASSWORD} +# 3. TNS Alias (requires tnsnames.ora) +tnsAlias: "MY_DB_ALIAS" +tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var - # Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client) +user: ${USER_NAME} +password: ${PASSWORD} + +# Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client) ``` ### Using an Oracle Wallet @@ -122,15 +122,15 @@ Oracle Wallet allows you to store credentails used for database connection. 
Depe The `go-ora` driver uses the `walletLocation` field to connect to a database secured with an Oracle Wallet without standard username and password. ```yaml -sources: - pure-go-wallet: - kind: oracle - connectionString: "127.0.0.1:1521/XEPDB1" - user: ${USER_NAME} - password: ${PASSWORD} - # The TNS Alias is often required to connect to a service registered in tnsnames.ora - tnsAlias: "SECURE_DB_ALIAS" - walletLocation: "/path/to/my/wallet/directory" +kind: sources +name: pure-go-wallet +type: oracle +connectionString: "127.0.0.1:1521/XEPDB1" +user: ${USER_NAME} +password: ${PASSWORD} +# The TNS Alias is often required to connect to a service registered in tnsnames.ora +tnsAlias: "SECURE_DB_ALIAS" +walletLocation: "/path/to/my/wallet/directory" ``` #### OCI-Based Driver (`useOCI: true`) - Oracle Wallet @@ -138,15 +138,15 @@ sources: For the OCI-based driver, wallet authentication is triggered by setting tnsAdmin to the wallet directory and connecting via a tnsAlias. ```yaml -sources: - oci-wallet: - kind: oracle - connectionString: "127.0.0.1:1521/XEPDB1" - user: ${USER_NAME} - password: ${PASSWORD} - tnsAlias: "WALLET_DB_ALIAS" - tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files - useOCI: true +kind: sources +name: oci-wallet +type: oracle +connectionString: "127.0.0.1:1521/XEPDB1" +user: ${USER_NAME} +password: ${PASSWORD} +tnsAlias: "WALLET_DB_ALIAS" +tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files +useOCI: true ``` {{< notice tip >}} @@ -158,7 +158,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "oracle". 
| +| type | string | true | Must be "oracle". | | user | string | true | Name of the Oracle user to connect as (e.g. "my-oracle-user"). | | password | string | true | Password of the Oracle user (e.g. "my-password"). | | host | string | false | IP address or hostname to connect to (e.g. "127.0.0.1"). Required if not using `connectionString` or `tnsAlias`. | diff --git a/docs/en/resources/sources/postgres.md b/docs/en/resources/sources/postgres.md index 8668b46190..ed7c77aeee 100644 --- a/docs/en/resources/sources/postgres.md +++ b/docs/en/resources/sources/postgres.md @@ -107,14 +107,14 @@ PostgreSQL user][pg-users] to login to the database with. ## Example ```yaml -sources: - my-pg-source: - kind: postgres - host: 127.0.0.1 - port: 5432 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} +kind: sources +name: my-pg-source +type: postgres +host: 127.0.0.1 +port: 5432 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} ``` {{< notice tip >}} @@ -126,7 +126,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-------------|:------------------:|:------------:|------------------------------------------------------------------------| -| kind | string | true | Must be "postgres". | +| type | string | true | Must be "postgres". | | host | string | true | IP address to connect to (e.g. "127.0.0.1") | | port | string | true | Port to connect to (e.g. "5432") | | database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/redis.md b/docs/en/resources/sources/redis.md index c0dee699e0..51c8cfde00 100644 --- a/docs/en/resources/sources/redis.md +++ b/docs/en/resources/sources/redis.md @@ -34,16 +34,16 @@ connections must authenticate in order to connect. 
Specify your AUTH string in the password field: ```yaml -sources: - my-redis-instance: - kind: redis - address: - - 127.0.0.1:6379 - username: ${MY_USER_NAME} - password: ${MY_AUTH_STRING} # Omit this field if you don't have a password. - # database: 0 - # clusterEnabled: false - # useGCPIAM: false +kind: sources +name: my-redis-instance +type: redis +address: + - 127.0.0.1:6379 +username: ${MY_USER_NAME} +password: ${MY_AUTH_STRING} # Omit this field if you don't have a password. +# database: 0 +# clusterEnabled: false +# useGCPIAM: false ``` {{< notice tip >}} @@ -59,14 +59,14 @@ string. Here is an example tools.yaml config with [AUTH][auth] enabled: ```yaml -sources: - my-redis-cluster-instance: - kind: memorystore-redis - address: - - 127.0.0.1:6379 - password: ${MY_AUTH_STRING} - # useGCPIAM: false - # clusterEnabled: false +kind: sources +name: my-redis-cluster-instance +type: memorystore-redis +address: + - 127.0.0.1:6379 +password: ${MY_AUTH_STRING} +# useGCPIAM: false +# clusterEnabled: false ``` Memorystore Redis Cluster supports IAM authentication instead. Grant your @@ -76,13 +76,13 @@ Here is an example tools.yaml config for Memorystore Redis Cluster instances using IAM authentication: ```yaml -sources: - my-redis-cluster-instance: - kind: memorystore-redis - address: - - 127.0.0.1:6379 - useGCPIAM: true - clusterEnabled: true +kind: sources +name: my-redis-cluster-instance +type: memorystore-redis +address: + - 127.0.0.1:6379 +useGCPIAM: true +clusterEnabled: true ``` [iam]: https://cloud.google.com/memorystore/docs/cluster/about-iam-auth @@ -91,7 +91,7 @@ sources: | **field** | **type** | **required** | **description** | |----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "memorystore-redis". | +| type | string | true | Must be "memorystore-redis". 
| | address | string | true | Primary endpoint for the Memorystore Redis instance to connect to. | | username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Redis, leave this field blank | | password | string | false | If you have [Redis AUTH][auth] enabled, specify the AUTH string here | diff --git a/docs/en/resources/sources/serverless-spark.md b/docs/en/resources/sources/serverless-spark.md index 1f2afc3cec..f08c6c5e79 100644 --- a/docs/en/resources/sources/serverless-spark.md +++ b/docs/en/resources/sources/serverless-spark.md @@ -49,17 +49,17 @@ set up your ADC. ## Example ```yaml -sources: - my-serverless-spark-source: - kind: serverless-spark - project: my-project-id - location: us-central1 +kind: sources +name: my-serverless-spark-source +type: serverless-spark +project: my-project-id +location: us-central1 ``` ## Reference | **field** | **type** | **required** | **description** | | --------- | :------: | :----------: | ----------------------------------------------------------------- | -| kind | string | true | Must be "serverless-spark". | +| type | string | true | Must be "serverless-spark". | | project | string | true | ID of the GCP project with Serverless for Apache Spark resources. | | location | string | true | Location containing Serverless for Apache Spark resources. | diff --git a/docs/en/resources/sources/singlestore.md b/docs/en/resources/sources/singlestore.md index fef332a7d8..9acde19ff0 100644 --- a/docs/en/resources/sources/singlestore.md +++ b/docs/en/resources/sources/singlestore.md @@ -39,15 +39,15 @@ database user][singlestore-user] to login to the database with. 
## Example ```yaml -sources: - my-singlestore-source: - kind: singlestore - host: 127.0.0.1 - port: 3306 - database: my_db - user: ${USER_NAME} - password: ${PASSWORD} - queryTimeout: 30s # Optional: query timeout duration +kind: sources +name: my-singlestore-source +type: singlestore +host: 127.0.0.1 +port: 3306 +database: my_db +user: ${USER_NAME} +password: ${PASSWORD} +queryTimeout: 30s # Optional: query timeout duration ``` {{< notice tip >}} @@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |--------------|:--------:|:------------:|-------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "singlestore". | +| type | string | true | Must be "singlestore". | | host | string | true | IP address to connect to (e.g. "127.0.0.1"). | | port | string | true | Port to connect to (e.g. "3306"). | | database | string | true | Name of the SingleStore database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/snowflake.md b/docs/en/resources/sources/snowflake.md index 5981538ec5..3a048470e7 100644 --- a/docs/en/resources/sources/snowflake.md +++ b/docs/en/resources/sources/snowflake.md @@ -31,16 +31,16 @@ Snowflake user to login to the database with. ## Example ```yaml -sources: - my-sf-source: - kind: snowflake - account: ${SNOWFLAKE_ACCOUNT} - user: ${SNOWFLAKE_USER} - password: ${SNOWFLAKE_PASSWORD} - database: ${SNOWFLAKE_DATABASE} - schema: ${SNOWFLAKE_SCHEMA} - warehouse: ${SNOWFLAKE_WAREHOUSE} - role: ${SNOWFLAKE_ROLE} +kind: sources +name: my-sf-source +type: snowflake +account: ${SNOWFLAKE_ACCOUNT} +user: ${SNOWFLAKE_USER} +password: ${SNOWFLAKE_PASSWORD} +database: ${SNOWFLAKE_DATABASE} +schema: ${SNOWFLAKE_SCHEMA} +warehouse: ${SNOWFLAKE_WAREHOUSE} +role: ${SNOWFLAKE_ROLE} ``` {{< notice tip >}} @@ -52,7 +52,7 @@ instead of hardcoding your secrets into the configuration file. 
| **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|------------------------------------------------------------------------| -| kind | string | true | Must be "snowflake". | +| type | string | true | Must be "snowflake". | | account | string | true | Your Snowflake account identifier. | | user | string | true | Name of the Snowflake user to connect as (e.g. "my-sf-user"). | | password | string | true | Password of the Snowflake user (e.g. "my-password"). | diff --git a/docs/en/resources/sources/spanner.md b/docs/en/resources/sources/spanner.md index 373ba75446..c8039583ac 100644 --- a/docs/en/resources/sources/spanner.md +++ b/docs/en/resources/sources/spanner.md @@ -64,20 +64,20 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -sources: - my-spanner-source: - kind: "spanner" - project: "my-project-id" - instance: "my-instance" - database: "my_db" - # dialect: "googlesql" +kind: sources +name: my-spanner-source +type: "spanner" +project: "my-project-id" +instance: "my-instance" +database: "my_db" +# dialect: "googlesql" ``` ## Reference | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "spanner". | +| type | string | true | Must be "spanner". | | project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). | | instance | string | true | Name of the Spanner instance. | | database | string | true | Name of the database on the Spanner instance | diff --git a/docs/en/resources/sources/sqlite.md b/docs/en/resources/sources/sqlite.md index 5a3c6df918..aae3030347 100644 --- a/docs/en/resources/sources/sqlite.md +++ b/docs/en/resources/sources/sqlite.md @@ -48,19 +48,19 @@ You need a SQLite database file. 
This can be: ## Example ```yaml -sources: - my-sqlite-db: - kind: "sqlite" - database: "/path/to/database.db" +kind: sources +name: my-sqlite-db +type: "sqlite" +database: "/path/to/database.db" ``` For an in-memory database: ```yaml -sources: - my-sqlite-memory-db: - kind: "sqlite" - database: ":memory:" +kind: sources +name: my-sqlite-memory-db +type: "sqlite" +database: ":memory:" ``` ## Reference @@ -69,7 +69,7 @@ sources: | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "sqlite". | +| type | string | true | Must be "sqlite". | | database | string | true | Path to SQLite database file, or ":memory:" for an in-memory database. | ### Connection Properties diff --git a/docs/en/resources/sources/tidb.md b/docs/en/resources/sources/tidb.md index 3b22db0bdb..2bb4b62c66 100644 --- a/docs/en/resources/sources/tidb.md +++ b/docs/en/resources/sources/tidb.md @@ -46,29 +46,29 @@ console. 
- TiDB Cloud ```yaml - sources: - my-tidb-cloud-source: - kind: tidb - host: gateway01.us-west-2.prod.aws.tidbcloud.com - port: 4000 - database: my_db - user: ${TIDB_USERNAME} - password: ${TIDB_PASSWORD} - # SSL is automatically enabled for TiDB Cloud + kind: sources + name: my-tidb-cloud-source + type: tidb + host: gateway01.us-west-2.prod.aws.tidbcloud.com + port: 4000 + database: my_db + user: ${TIDB_USERNAME} + password: ${TIDB_PASSWORD} + # SSL is automatically enabled for TiDB Cloud ``` - Self-Hosted TiDB ```yaml - sources: - my-tidb-source: - kind: tidb - host: 127.0.0.1 - port: 4000 - database: my_db - user: ${TIDB_USERNAME} - password: ${TIDB_PASSWORD} - # ssl: true # Optional: enable SSL for secure connections + kind: sources + name: my-tidb-source + type: tidb + host: 127.0.0.1 + port: 4000 + database: my_db + user: ${TIDB_USERNAME} + password: ${TIDB_PASSWORD} + # ssl: true # Optional: enable SSL for secure connections ``` {{< notice tip >}} @@ -80,7 +80,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | |-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------| -| kind | string | true | Must be "tidb". | +| type | string | true | Must be "tidb". | | host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "gateway01.*.tidbcloud.com"). | | port | string | true | Port to connect to (typically "4000" for TiDB). | | database | string | true | Name of the TiDB database to connect to (e.g. "my_db"). | diff --git a/docs/en/resources/sources/trino.md b/docs/en/resources/sources/trino.md index e6a9122681..d3ae1e6fcf 100644 --- a/docs/en/resources/sources/trino.md +++ b/docs/en/resources/sources/trino.md @@ -32,15 +32,15 @@ the catalogs and schemas you want to query. 
## Example ```yaml -sources: - my-trino-source: - kind: trino - host: trino.example.com - port: "8080" - user: ${TRINO_USER} # Optional for anonymous access - password: ${TRINO_PASSWORD} # Optional - catalog: hive - schema: default +kind: sources +name: my-trino-source +type: trino +host: trino.example.com +port: "8080" +user: ${TRINO_USER} # Optional for anonymous access +password: ${TRINO_PASSWORD} # Optional +catalog: hive +schema: default ``` {{< notice tip >}} @@ -52,7 +52,7 @@ instead of hardcoding your secrets into the configuration file. | **field** | **type** | **required** | **description** | | ---------------------- | :------: | :----------: | ---------------------------------------------------------------------------- | -| kind | string | true | Must be "trino". | +| type | string | true | Must be "trino". | | host | string | true | Trino coordinator hostname (e.g. "trino.example.com") | | port | string | true | Trino coordinator port (e.g. "8080", "8443") | | user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. | diff --git a/docs/en/resources/sources/valkey.md b/docs/en/resources/sources/valkey.md index 8b1e68e947..aedb8759aa 100644 --- a/docs/en/resources/sources/valkey.md +++ b/docs/en/resources/sources/valkey.md @@ -27,16 +27,16 @@ the [official Valkey website](https://valkey.io/topics/quickstart/). ## Example ```yaml -sources: - my-valkey-instance: - kind: valkey - address: - - 127.0.0.1:6379 - username: ${YOUR_USERNAME} - password: ${YOUR_PASSWORD} - # database: 0 - # useGCPIAM: false - # disableCache: false +kind: sources +name: my-valkey-instance +type: valkey +address: + - 127.0.0.1:6379 +username: ${YOUR_USERNAME} +password: ${YOUR_PASSWORD} +# database: 0 +# useGCPIAM: false +# disableCache: false ``` {{< notice tip >}} @@ -51,12 +51,12 @@ authentication. 
Grant your account the required [IAM role][iam] and set `useGCPIAM` to `true`: ```yaml -sources: - my-valkey-instance: - kind: valkey - address: - - 127.0.0.1:6379 - useGCPIAM: true +kind: sources +name: my-valkey-instance +type: valkey +address: + - 127.0.0.1:6379 +useGCPIAM: true ``` [iam]: https://cloud.google.com/memorystore/docs/valkey/about-iam-auth @@ -65,7 +65,7 @@ sources: | **field** | **type** | **required** | **description** | |--------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "valkey". | +| type | string | true | Must be "valkey". | | address | []string | true | Endpoints for the Valkey instance to connect to. | | username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Valkey, leave this field blank | | password | string | false | Password for the Valkey instance | diff --git a/docs/en/resources/sources/yugabytedb.md b/docs/en/resources/sources/yugabytedb.md index cdd1362197..b923327cdc 100644 --- a/docs/en/resources/sources/yugabytedb.md +++ b/docs/en/resources/sources/yugabytedb.md @@ -17,23 +17,23 @@ compatibility. 
## Example ```yaml -sources: - my-yb-source: - kind: yugabytedb - host: 127.0.0.1 - port: 5433 - database: yugabyte - user: ${USER_NAME} - password: ${PASSWORD} - loadBalance: true - topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2 +kind: sources +name: my-yb-source +type: yugabytedb +host: 127.0.0.1 +port: 5433 +database: yugabyte +user: ${USER_NAME} +password: ${PASSWORD} +loadBalance: true +topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2 ``` ## Reference | **field** | **type** | **required** | **description** | |------------------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "yugabytedb". | +| type | string | true | Must be "yugabytedb". | | host | string | true | IP address to connect to. | | port | integer | true | Port to connect to. The default port is 5433. | | database | string | true | Name of the YugabyteDB database to connect to. The default database name is yugabyte. | diff --git a/docs/en/resources/tools/_index.md b/docs/en/resources/tools/_index.md index 43a72427f7..aaf8f73f74 100644 --- a/docs/en/resources/tools/_index.md +++ b/docs/en/resources/tools/_index.md @@ -12,41 +12,41 @@ statement. You can define Tools as a map in the `tools` section of your `tools.yaml` file. Typically, a tool will require a source to act on: ```yaml -tools: - search_flights_by_number: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT * FROM flights - WHERE airline = $1 - AND flight_number = $2 - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. 
- An airline code is a code for an airline service consisting of a two-character - airline designator and followed by a flight number, which is a 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closest to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: postgres-sql +source: my-pg-instance +statement: | + SELECT * FROM flights + WHERE airline = $1 + AND flight_number = $2 + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + An airline code is a code for an airline service consisting of a two-character + airline designator and followed by a flight number, which is a 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closest to today. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ## Specifying Parameters @@ -55,13 +55,13 @@ Parameters for each Tool will define what inputs the agent will need to provide to invoke them. Parameters should be pass as a list of Parameter objects: ```yaml - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Basic Parameters @@ -71,10 +71,10 @@ most cases, the description will be provided to the LLM as context on specifying the parameter. ```yaml - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier ``` | **field** | **type** | **required** | **description** | @@ -97,16 +97,16 @@ To use the `array` type, you must also specify what kind of items are in the list using the items field: ```yaml - parameters: - - name: preferred_airlines - type: array - description: A list of airline, ordered by preference. - items: - name: name - type: string - description: Name of the airline. - statement: | - SELECT * FROM airlines WHERE preferred_airlines = ANY($1); +parameters: + - name: preferred_airlines + type: array + description: A list of airline, ordered by preference. + items: + name: name + type: string + description: Name of the airline. 
+statement: | + SELECT * FROM airlines WHERE preferred_airlines = ANY($1); ``` | **field** | **type** | **required** | **description** | @@ -141,10 +141,10 @@ This is the default behavior when valueType is omitted. It's useful for passing a flexible group of settings. ```yaml - parameters: - - name: execution_context - type: map - description: A flexible set of key-value pairs for the execution environment. +parameters: + - name: execution_context + type: map + description: A flexible set of key-value pairs for the execution environment. ``` #### Typed Map @@ -153,11 +153,11 @@ Specify valueType to ensure all values in the map are of the same type. An error will be thrown in case of value type mismatch. ```yaml - parameters: - - name: user_scores - type: map - description: A map of user IDs to their scores. All scores must be integers. - valueType: integer # This enforces the value type for all entries. +parameters: + - name: user_scores + type: map + description: A map of user IDs to their scores. All scores must be integers. + valueType: integer # This enforces the value type for all entries. ``` ### Authenticated Parameters @@ -171,21 +171,21 @@ the required [authServices](../authServices/) to specific claims within the user's ID token. 
```yaml - tools: - search_flights_by_user_id: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT * FROM flights WHERE user_id = $1 - parameters: - - name: user_id - type: string - description: Auto-populated from Google login - authServices: - # Refer to one of the `authServices` defined - - name: my-google-auth - # `sub` is the OIDC claim field for user ID - field: sub +kind: tools +name: search_flights_by_user_id +type: postgres-sql +source: my-pg-instance +statement: | + SELECT * FROM flights WHERE user_id = $1 +parameters: + - name: user_id + type: string + description: Auto-populated from Google login + authServices: + # Refer to one of the `authServices` defined + - name: my-google-auth + # `sub` is the OIDC claim field for user ID + field: sub ``` | **field** | **type** | **required** | **description** | @@ -222,31 +222,31 @@ can use `minValue` and `maxValue` to define the allowable range. {{< /notice >}} ```yaml -tools: - select_columns_from_table: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT {{array .columnNames}} FROM {{.tableName}} - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - "columnNames": ["id", "name"] - }} - templateParameters: - - name: tableName - type: string - description: Table to select from - - name: columnNames - type: array - description: The columns to select - items: - name: column - type: string - description: Name of a column to select - escape: double-quotes # with this, the statement will resolve to `SELECT "id", "name" FROM flights` +kind: tools +name: select_columns_from_table +type: postgres-sql +source: my-pg-instance +statement: | + SELECT {{array .columnNames}} FROM {{.tableName}} +description: | + Use this tool to list all information from a specific table. 
+ Example: + {{ + "tableName": "flights", + "columnNames": ["id", "name"] + }} +templateParameters: + - name: tableName + type: string + description: Table to select from + - name: columnNames + type: array + description: The columns to select + items: + name: column + type: string + description: Name of a column to select + escape: double-quotes # with this, the statement will resolve to `SELECT "id", "name" FROM flights` ``` | **field** | **type** | **required** | **description** | @@ -267,16 +267,16 @@ specifying an `authRequired` field. Specify a list of [authServices](../authServices/) defined in the previous section. ```yaml -tools: - search_all_flight: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT * FROM flights - # A list of `authServices` defined previously - authRequired: - - my-google-auth - - other-auth-service +kind: tools +name: search_all_flight +type: postgres-sql +source: my-pg-instance +statement: | + SELECT * FROM flights +# A list of `authServices` defined previously +authRequired: + - my-google-auth + - other-auth-service ``` ## Kinds of tools diff --git a/docs/en/resources/tools/alloydb/alloydb-create-cluster.md b/docs/en/resources/tools/alloydb/alloydb-create-cluster.md index b70d320213..621feb1852 100644 --- a/docs/en/resources/tools/alloydb/alloydb-create-cluster.md +++ b/docs/en/resources/tools/alloydb/alloydb-create-cluster.md @@ -40,17 +40,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - create_cluster: - kind: alloydb-create-cluster - source: alloydb-admin-source - description: Use this tool to create a new AlloyDB cluster in a given project and location. +kind: tools +name: create_cluster +type: alloydb-create-cluster +source: alloydb-admin-source +description: Use this tool to create a new AlloyDB cluster in a given project and location. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be alloydb-create-cluster. | +| type | string | true | Must be alloydb-create-cluster. | | source | string | true | The name of an `alloydb-admin` source. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/alloydb/alloydb-create-instance.md b/docs/en/resources/tools/alloydb/alloydb-create-instance.md index 6a8680af94..19f6aaa95c 100644 --- a/docs/en/resources/tools/alloydb/alloydb-create-instance.md +++ b/docs/en/resources/tools/alloydb/alloydb-create-instance.md @@ -45,17 +45,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - create_instance: - kind: alloydb-create-instance - source: alloydb-admin-source - description: Use this tool to create a new AlloyDB instance within a specified cluster. +kind: tools +name: create_instance +type: alloydb-create-instance +source: alloydb-admin-source +description: Use this tool to create a new AlloyDB instance within a specified cluster. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-create-instance. | +| type | string | true | Must be alloydb-create-instance. | | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. 
| \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-create-user.md b/docs/en/resources/tools/alloydb/alloydb-create-user.md index b4ca9bcbb3..3c7e450781 100644 --- a/docs/en/resources/tools/alloydb/alloydb-create-user.md +++ b/docs/en/resources/tools/alloydb/alloydb-create-user.md @@ -39,17 +39,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - create_user: - kind: alloydb-create-user - source: alloydb-admin-source - description: Use this tool to create a new database user for an AlloyDB cluster. +kind: tools +name: create_user +type: alloydb-create-user +source: alloydb-admin-source +description: Use this tool to create a new database user for an AlloyDB cluster. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-create-user. | +| type | string | true | Must be alloydb-create-user. | | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-get-cluster.md b/docs/en/resources/tools/alloydb/alloydb-get-cluster.md index 29f5af0cf5..fcd5049393 100644 --- a/docs/en/resources/tools/alloydb/alloydb-get-cluster.md +++ b/docs/en/resources/tools/alloydb/alloydb-get-cluster.md @@ -3,7 +3,7 @@ title: alloydb-get-cluster type: docs weight: 1 description: "The \"alloydb-get-cluster\" tool retrieves details for a specific AlloyDB cluster.\n" -aliases: [/resources/tools/alloydb-get-cluster] +alias: [/resources/tools/alloydb-get-cluster] --- ## About @@ -21,17 +21,17 @@ specified AlloyDB cluster. 
It is compatible with ## Example ```yaml -tools: - get_specific_cluster: - kind: alloydb-get-cluster - source: my-alloydb-admin-source - description: Use this tool to retrieve details for a specific AlloyDB cluster. +kind: tools +name: get_specific_cluster +type: alloydb-get-cluster +source: my-alloydb-admin-source +description: Use this tool to retrieve details for a specific AlloyDB cluster. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-get-cluster. | +| type | string | true | Must be alloydb-get-cluster. | | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-get-instance.md b/docs/en/resources/tools/alloydb/alloydb-get-instance.md index 0305c4bcc0..65a9a7ec81 100644 --- a/docs/en/resources/tools/alloydb/alloydb-get-instance.md +++ b/docs/en/resources/tools/alloydb/alloydb-get-instance.md @@ -22,17 +22,17 @@ specified AlloyDB instance. It is compatible with ## Example ```yaml -tools: - get_specific_instance: - kind: alloydb-get-instance - source: my-alloydb-admin-source - description: Use this tool to retrieve details for a specific AlloyDB instance. +kind: tools +name: get_specific_instance +type: alloydb-get-instance +source: my-alloydb-admin-source +description: Use this tool to retrieve details for a specific AlloyDB instance. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be alloydb-get-instance. | +| type | string | true | Must be alloydb-get-instance. 
| | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-get-user.md b/docs/en/resources/tools/alloydb/alloydb-get-user.md index afee54aab5..a223afe8f7 100644 --- a/docs/en/resources/tools/alloydb/alloydb-get-user.md +++ b/docs/en/resources/tools/alloydb/alloydb-get-user.md @@ -22,17 +22,17 @@ specified AlloyDB user. It is compatible with ## Example ```yaml -tools: - get_specific_user: - kind: alloydb-get-user - source: my-alloydb-admin-source - description: Use this tool to retrieve details for a specific AlloyDB user. +kind: tools +name: get_specific_user +type: alloydb-get-user +source: my-alloydb-admin-source +description: Use this tool to retrieve details for a specific AlloyDB user. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-get-user. | +| type | string | true | Must be alloydb-get-user. | | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-list-clusters.md b/docs/en/resources/tools/alloydb/alloydb-list-clusters.md index 6f0beb3a67..644f98729b 100644 --- a/docs/en/resources/tools/alloydb/alloydb-list-clusters.md +++ b/docs/en/resources/tools/alloydb/alloydb-list-clusters.md @@ -24,17 +24,17 @@ location. 
The tool takes the following input parameters: ## Example ```yaml -tools: - list_clusters: - kind: alloydb-list-clusters - source: alloydb-admin-source - description: Use this tool to list all AlloyDB clusters in a given project and location. +kind: tools +name: list_clusters +type: alloydb-list-clusters +source: alloydb-admin-source +description: Use this tool to list all AlloyDB clusters in a given project and location. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-list-clusters. | +| type | string | true | Must be alloydb-list-clusters. | | source | string | true | The name of an `alloydb-admin` source. | -| description | string | false | Description of the tool that is passed to the agent. | +| description | string | false | Description of the tool that is passed to the agent. | \ No newline at end of file diff --git a/docs/en/resources/tools/alloydb/alloydb-list-instances.md b/docs/en/resources/tools/alloydb/alloydb-list-instances.md index ecf1f9a8dd..fdf0c35c9d 100644 --- a/docs/en/resources/tools/alloydb/alloydb-list-instances.md +++ b/docs/en/resources/tools/alloydb/alloydb-list-instances.md @@ -26,17 +26,17 @@ parameters: ## Example ```yaml -tools: - list_instances: - kind: alloydb-list-instances - source: alloydb-admin-source - description: Use this tool to list all AlloyDB instances for a given project, cluster and location. +kind: tools +name: list_instances +type: alloydb-list-instances +source: alloydb-admin-source +description: Use this tool to list all AlloyDB instances for a given project, cluster and location. ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-list-instances. 
| +| type | string | true | Must be alloydb-list-instances. | | source | string | true | The name of an `alloydb-admin` source. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/alloydb/alloydb-list-users.md b/docs/en/resources/tools/alloydb/alloydb-list-users.md index 73d3015b88..b735cd0607 100644 --- a/docs/en/resources/tools/alloydb/alloydb-list-users.md +++ b/docs/en/resources/tools/alloydb/alloydb-list-users.md @@ -22,17 +22,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - list_users: - kind: alloydb-list-users - source: alloydb-admin-source - description: Use this tool to list all database users within an AlloyDB cluster +kind: tools +name: list_users +type: alloydb-list-users +source: alloydb-admin-source +description: Use this tool to list all database users within an AlloyDB cluster ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------- | -| kind | string | true | Must be alloydb-list-users. | +| type | string | true | Must be alloydb-list-users. | | source | string | true | The name of an `alloydb-admin` source. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/alloydb/alloydb-wait-for-operation.md b/docs/en/resources/tools/alloydb/alloydb-wait-for-operation.md index f5e84e5b97..98d67e9084 100644 --- a/docs/en/resources/tools/alloydb/alloydb-wait-for-operation.md +++ b/docs/en/resources/tools/alloydb/alloydb-wait-for-operation.md @@ -25,22 +25,22 @@ and shouldn't be used for production agents. ## Example ```yaml -tools: - wait_for_operation: - kind: alloydb-wait-for-operation - source: my-alloydb-admin-source - description: "This will poll on operations API until the operation is done. 
For checking operation status we need projectId, locationID and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup." - delay: 1s - maxDelay: 4m - multiplier: 2 - maxRetries: 10 +kind: tools +name: wait_for_operation +type: alloydb-wait-for-operation +source: my-alloydb-admin-source +description: "This will poll on operations API until the operation is done. For checking operation status we need projectId, locationID and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup." +delay: 1s +maxDelay: 4m +multiplier: 2 +maxRetries: 10 ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "alloydb-wait-for-operation". | +| type | string | true | Must be "alloydb-wait-for-operation". | | source | string | true | The name of a `alloydb-admin` source to use for authentication. | | description | string | false | A description of the tool. | | delay | duration | false | The initial delay between polling requests (e.g., `3s`). Defaults to 3 seconds. 
| diff --git a/docs/en/resources/tools/alloydbainl/alloydb-ai-nl.md b/docs/en/resources/tools/alloydbainl/alloydb-ai-nl.md index aa6d377f2c..97364c5dd8 100644 --- a/docs/en/resources/tools/alloydbainl/alloydb-ai-nl.md +++ b/docs/en/resources/tools/alloydbainl/alloydb-ai-nl.md @@ -103,29 +103,29 @@ CREATE EXTENSION IF NOT EXISTS parameterized_views; ## Example ```yaml -tools: - ask_questions: - kind: alloydb-ai-nl - source: my-alloydb-source - description: "Ask questions to check information about flights" - nlConfig: "cymbal_air_nl_config" - nlConfigParameters: - - name: user_email - type: string - description: User ID of the logged in user. - # note: we strongly recommend using features like Authenticated or - # Bound parameters to prevent the LLM from seeing these params and - # specifying values it shouldn't in the tool input - authServices: - - name: my_google_service - field: email +kind: tools +name: ask_questions +type: alloydb-ai-nl +source: my-alloydb-source +description: "Ask questions to check information about flights" +nlConfig: "cymbal_air_nl_config" +nlConfigParameters: + - name: user_email + type: string + description: User ID of the logged in user. + # note: we strongly recommend using features like Authenticated or + # Bound parameters to prevent the LLM from seeing these params and + # specifying values it shouldn't in the tool input + authServices: + - name: my_google_service + field: email ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:---------------------------------------:|:------------:|--------------------------------------------------------------------------| -| kind | string | true | Must be "alloydb-ai-nl". | +| type | string | true | Must be "alloydb-ai-nl". | | source | string | true | Name of the AlloyDB source the natural language query should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| | nlConfig | string | true | The name of the `nl_config` in AlloyDB | diff --git a/docs/en/resources/tools/bigquery/bigquery-analyze-contribution.md b/docs/en/resources/tools/bigquery/bigquery-analyze-contribution.md index 3561af3dd3..ed126246a4 100644 --- a/docs/en/resources/tools/bigquery/bigquery-analyze-contribution.md +++ b/docs/en/resources/tools/bigquery/bigquery-analyze-contribution.md @@ -64,11 +64,11 @@ the `bigquery` source: ## Example ```yaml -tools: - contribution_analyzer: - kind: bigquery-analyze-contribution - source: my-bigquery-source - description: Use this tool to run contribution analysis on a dataset in BigQuery. +kind: tools +name: contribution_analyzer +type: bigquery-analyze-contribution +source: my-bigquery-source +description: Use this tool to run contribution analysis on a dataset in BigQuery. ``` ## Sample Prompt @@ -88,6 +88,6 @@ And use the following sample prompts to call this tool: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "bigquery-analyze-contribution". | +| type | string | true | Must be "bigquery-analyze-contribution". | | source | string | true | Name of the source the tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-conversational-analytics.md b/docs/en/resources/tools/bigquery/bigquery-conversational-analytics.md index 2c8ef29083..486db6083e 100644 --- a/docs/en/resources/tools/bigquery/bigquery-conversational-analytics.md +++ b/docs/en/resources/tools/bigquery/bigquery-conversational-analytics.md @@ -53,19 +53,19 @@ dataset specified in the `table_references` parameter. 
## Example ```yaml -tools: - ask_data_insights: - kind: bigquery-conversational-analytics - source: my-bigquery-source - description: | - Use this tool to perform data analysis, get insights, or answer complex - questions about the contents of specific BigQuery tables. +kind: tools +name: ask_data_insights +type: bigquery-conversational-analytics +source: my-bigquery-source +description: | + Use this tool to perform data analysis, get insights, or answer complex + questions about the contents of specific BigQuery tables. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "bigquery-conversational-analytics". | +| type | string | true | Must be "bigquery-conversational-analytics". | | source | string | true | Name of the source for chat. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-execute-sql.md b/docs/en/resources/tools/bigquery/bigquery-execute-sql.md index b59ae58249..783ba5c742 100644 --- a/docs/en/resources/tools/bigquery/bigquery-execute-sql.md +++ b/docs/en/resources/tools/bigquery/bigquery-execute-sql.md @@ -54,17 +54,17 @@ layer of security by controlling which datasets can be accessed: ## Example ```yaml -tools: - execute_sql_tool: - kind: bigquery-execute-sql - source: my-bigquery-source - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: bigquery-execute-sql +source: my-bigquery-source +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "bigquery-execute-sql". | +| type | string | true | Must be "bigquery-execute-sql". 
| | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-forecast.md b/docs/en/resources/tools/bigquery/bigquery-forecast.md index 823eb487e1..188a7d11c7 100644 --- a/docs/en/resources/tools/bigquery/bigquery-forecast.md +++ b/docs/en/resources/tools/bigquery/bigquery-forecast.md @@ -58,11 +58,11 @@ the `bigquery` source: ## Example ```yaml -tools: - forecast_tool: - kind: bigquery-forecast - source: my-bigquery-source - description: Use this tool to forecast time series data in BigQuery. +kind: tools +name: forecast_tool +type: bigquery-forecast +source: my-bigquery-source +description: Use this tool to forecast time series data in BigQuery. ``` ## Sample Prompt @@ -78,6 +78,6 @@ You can use the following sample prompts to call this tool: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|---------------------------------------------------------| -| kind | string | true | Must be "bigquery-forecast". | +| type | string | true | Must be "bigquery-forecast". | | source | string | true | Name of the source the forecast tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-get-dataset-info.md b/docs/en/resources/tools/bigquery/bigquery-get-dataset-info.md index 460816d290..e68fe83574 100644 --- a/docs/en/resources/tools/bigquery/bigquery-get-dataset-info.md +++ b/docs/en/resources/tools/bigquery/bigquery-get-dataset-info.md @@ -34,17 +34,17 @@ The tool's behavior regarding these parameters is influenced by the ## Example ```yaml -tools: - bigquery_get_dataset_info: - kind: bigquery-get-dataset-info - source: my-bigquery-source - description: Use this tool to get dataset metadata. 
+kind: tools +name: bigquery_get_dataset_info +type: bigquery-get-dataset-info +source: my-bigquery-source +description: Use this tool to get dataset metadata. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-get-dataset-info". | +| type | string | true | Must be "bigquery-get-dataset-info". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-get-table-info.md b/docs/en/resources/tools/bigquery/bigquery-get-table-info.md index e05f74c131..807e9a8f04 100644 --- a/docs/en/resources/tools/bigquery/bigquery-get-table-info.md +++ b/docs/en/resources/tools/bigquery/bigquery-get-table-info.md @@ -35,17 +35,17 @@ The tool's behavior regarding these parameters is influenced by the ## Example ```yaml -tools: - bigquery_get_table_info: - kind: bigquery-get-table-info - source: my-bigquery-source - description: Use this tool to get table metadata. +kind: tools +name: bigquery_get_table_info +type: bigquery-get-table-info +source: my-bigquery-source +description: Use this tool to get table metadata. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-get-table-info". | +| type | string | true | Must be "bigquery-get-table-info". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/bigquery/bigquery-list-dataset-ids.md b/docs/en/resources/tools/bigquery/bigquery-list-dataset-ids.md index 176ad78dfe..c2cd8b7609 100644 --- a/docs/en/resources/tools/bigquery/bigquery-list-dataset-ids.md +++ b/docs/en/resources/tools/bigquery/bigquery-list-dataset-ids.md @@ -32,17 +32,17 @@ The tool's behavior regarding this parameter is influenced by the ## Example ```yaml -tools: - bigquery_list_dataset_ids: - kind: bigquery-list-dataset-ids - source: my-bigquery-source - description: Use this tool to get dataset metadata. +kind: tools +name: bigquery_list_dataset_ids +type: bigquery-list-dataset-ids +source: my-bigquery-source +description: Use this tool to get dataset metadata. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-list-dataset-ids". | +| type | string | true | Must be "bigquery-list-dataset-ids". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-list-table-ids.md b/docs/en/resources/tools/bigquery/bigquery-list-table-ids.md index 5e72d0b22a..828e84cb8f 100644 --- a/docs/en/resources/tools/bigquery/bigquery-list-table-ids.md +++ b/docs/en/resources/tools/bigquery/bigquery-list-table-ids.md @@ -34,17 +34,17 @@ will be used as the default value for the `dataset` parameter. ## Example ```yaml -tools: - bigquery_list_table_ids: - kind: bigquery-list-table-ids - source: my-bigquery-source - description: Use this tool to get table metadata. +kind: tools +name: bigquery_list_table_ids +type: bigquery-list-table-ids +source: my-bigquery-source +description: Use this tool to get table metadata. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-list-table-ids". | +| type | string | true | Must be "bigquery-list-table-ids". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/bigquery/bigquery-search-catalog.md b/docs/en/resources/tools/bigquery/bigquery-search-catalog.md index 32a107983e..3994e4744a 100644 --- a/docs/en/resources/tools/bigquery/bigquery-search-catalog.md +++ b/docs/en/resources/tools/bigquery/bigquery-search-catalog.md @@ -48,17 +48,17 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -tools: - search_catalog: - kind: bigquery-search-catalog - source: bigquery-source - description: Use this tool to find tables, views, models, routines or connections. +kind: tools +name: search_catalog +type: bigquery-search-catalog +source: bigquery-source +description: Use this tool to find tables, views, models, routines or connections. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-search-catalog". | +| type | string | true | Must be "bigquery-search-catalog". | | source | string | true | Name of the source the tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/bigquery/bigquery-sql.md b/docs/en/resources/tools/bigquery/bigquery-sql.md index c07192f710..39d5c750ff 100644 --- a/docs/en/resources/tools/bigquery/bigquery-sql.md +++ b/docs/en/resources/tools/bigquery/bigquery-sql.md @@ -46,36 +46,36 @@ same query. > identifiers, column names, table names, or other parts of the query. ```yaml -tools: - # Example: Querying a user table in BigQuery - search_users_bq: - kind: bigquery-sql - source: my-bigquery-source - statement: | - SELECT - id, - name, - email - FROM - `my-project.my-dataset.users` - WHERE - id = @id OR email = @email; - description: | - Use this tool to get information for a specific user. - Takes an id number or a name and returns info on the user. +# Example: Querying a user table in BigQuery +kind: tools +name: search_users_bq +type: bigquery-sql +source: my-bigquery-source +statement: | + SELECT + id, + name, + email + FROM + `my-project.my-dataset.users` + WHERE + id = @id OR email = @email; +description: | + Use this tool to get information for a specific user. + Takes an id number or a name and returns info on the user. - Example: - {{ - "id": 123, - "name": "Alice", - }} - parameters: - - name: id - type: integer - description: User ID - - name: email - type: string - description: Email address of the user + Example: + {{ + "id": 123, + "name": "Alice", + }} +parameters: + - name: id + type: integer + description: User ID + - name: email + type: string + description: Email address of the user ``` ### Example with Template Parameters @@ -87,31 +87,31 @@ tools: > [templateParameters](../#template-parameters). ```yaml -tools: - list_table: - kind: bigquery-sql - source: my-bigquery-source - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. 
- Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: bigquery-sql +source: my-bigquery-source +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:---------------------------------------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigquery-sql". | +| type | string | true | Must be "bigquery-sql". | | source | string | true | Name of the source the GoogleSQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | The GoogleSQL statement to execute. | | parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. | -| templateParameters | [templateParameters](../#template-parameters) | false | List of [templateParameters](../#template-parameters) that will be inserted into the SQL statement before executing prepared statement. | +| templateParameters | [templateParameters](../#template-parameters) | false | List of [templateParameters](../#template-parameters) that will be inserted into the SQL statement before executing prepared statement. 
| \ No newline at end of file diff --git a/docs/en/resources/tools/bigtable/bigtable-sql.md b/docs/en/resources/tools/bigtable/bigtable-sql.md index cce122f94d..7d56be05b5 100644 --- a/docs/en/resources/tools/bigtable/bigtable-sql.md +++ b/docs/en/resources/tools/bigtable/bigtable-sql.md @@ -40,35 +40,35 @@ inserted according to their name: e.g. `@name`. > names, or other parts of the query. ```yaml -tools: - search_user_by_id_or_name: - kind: bigtable-sql - source: my-bigtable-instance - statement: | - SELECT - TO_INT64(cf[ 'id' ]) as id, - CAST(cf[ 'name' ] AS string) as name, - FROM - mytable - WHERE - TO_INT64(cf[ 'id' ]) = @id - OR CAST(cf[ 'name' ] AS string) = @name; - description: | - Use this tool to get information for a specific user. - Takes an id number or a name and returns info on the user. +kind: tools +name: search_user_by_id_or_name +type: bigtable-sql +source: my-bigtable-instance +statement: | + SELECT + TO_INT64(cf[ 'id' ]) as id, + CAST(cf[ 'name' ] AS string) as name, + FROM + mytable + WHERE + TO_INT64(cf[ 'id' ]) = @id + OR CAST(cf[ 'name' ] AS string) = @name; +description: | + Use this tool to get information for a specific user. + Takes an id number or a name and returns info on the user. - Example: - {{ - "id": 123, - "name": "Alice", - }} - parameters: - - name: id - type: integer - description: User ID - - name: name - type: string - description: Name of the user + Example: + {{ + "id": 123, + "name": "Alice", + }} +parameters: + - name: id + type: integer + description: User ID + - name: name + type: string + description: Name of the user ``` ### Example with Template Parameters @@ -80,29 +80,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: bigtable-sql - source: my-bigtable-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. 
- Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: bigtable-sql +source: my-bigtable-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "bigtable-sql". | +| type | string | true | Must be "bigtable-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/cassandra/cassandra-cql.md b/docs/en/resources/tools/cassandra/cassandra-cql.md index feb092ee21..14f1eb8e01 100644 --- a/docs/en/resources/tools/cassandra/cassandra-cql.md +++ b/docs/en/resources/tools/cassandra/cassandra-cql.md @@ -31,27 +31,27 @@ the form of placeholders `?`. > names, or other parts of the query. ```yaml -tools: - search_users_by_email: - kind: cassandra-cql - source: my-cassandra-cluster - statement: | - SELECT user_id, email, first_name, last_name, created_at - FROM users - WHERE email = ? - description: | - Use this tool to retrieve specific user information by their email address. - Takes an email address and returns user details including user ID, email, - first name, last name, and account creation timestamp. - Do NOT use this tool with a user ID or other identifiers. 
- Example: - {{ - "email": "user@example.com", - }} - parameters: - - name: email - type: string - description: User's email address +kind: tools +name: search_users_by_email +type: cassandra-cql +source: my-cassandra-cluster +statement: | + SELECT user_id, email, first_name, last_name, created_at + FROM users + WHERE email = ? +description: | + Use this tool to retrieve specific user information by their email address. + Takes an email address and returns user details including user ID, email, + first name, last name, and account creation timestamp. + Do NOT use this tool with a user ID or other identifiers. + Example: + {{ + "email": "user@example.com", + }} +parameters: + - name: email + type: string + description: User's email address ``` ### Example with Template Parameters @@ -63,33 +63,33 @@ tools: > [templateParameters](../#template-parameters). ```yaml -tools: - list_keyspace_table: - kind: cassandra-cql - source: my-cassandra-cluster - statement: | - SELECT * FROM {{.keyspace}}.{{.tableName}}; - description: | - Use this tool to list all information from a specific table in a keyspace. - Example: - {{ - "keyspace": "my_keyspace", - "tableName": "users", - }} - templateParameters: - - name: keyspace - type: string - description: Keyspace containing the table - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_keyspace_table +type: cassandra-cql +source: my-cassandra-cluster +statement: | + SELECT * FROM {{.keyspace}}.{{.tableName}}; +description: | + Use this tool to list all information from a specific table in a keyspace. 
+ Example: + {{ + "keyspace": "my_keyspace", + "tableName": "users", + }} +templateParameters: + - name: keyspace + type: string + description: Keyspace containing the table + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:---------------------------------------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "cassandra-cql". | +| type | string | true | Must be "cassandra-cql". | | source | string | true | Name of the source the CQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | CQL statement to execute. | diff --git a/docs/en/resources/tools/clickhouse/clickhouse-execute-sql.md b/docs/en/resources/tools/clickhouse/clickhouse-execute-sql.md index e197c1f05b..5e28b54386 100644 --- a/docs/en/resources/tools/clickhouse/clickhouse-execute-sql.md +++ b/docs/en/resources/tools/clickhouse/clickhouse-execute-sql.md @@ -25,11 +25,11 @@ capabilities for monitoring and debugging purposes. ## Example ```yaml -tools: - execute_sql_tool: - kind: clickhouse-execute-sql - source: my-clickhouse-instance - description: Use this tool to execute SQL statements against ClickHouse. +kind: tools +name: execute_sql_tool +type: clickhouse-execute-sql +source: my-clickhouse-instance +description: Use this tool to execute SQL statements against ClickHouse. ``` ## Parameters @@ -42,6 +42,6 @@ tools: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|-------------------------------------------------------| -| kind | string | true | Must be "clickhouse-execute-sql". | +| type | string | true | Must be "clickhouse-execute-sql". 
| | source | string | true | Name of the ClickHouse source to execute SQL against. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/clickhouse/clickhouse-list-databases.md b/docs/en/resources/tools/clickhouse/clickhouse-list-databases.md index 43e5f0e982..eedf197b41 100644 --- a/docs/en/resources/tools/clickhouse/clickhouse-list-databases.md +++ b/docs/en/resources/tools/clickhouse/clickhouse-list-databases.md @@ -21,11 +21,11 @@ discovery and exploration tasks. ## Example ```yaml -tools: - list_clickhouse_databases: - kind: clickhouse-list-databases - source: my-clickhouse-instance - description: List all available databases in the ClickHouse instance +kind: tools +name: list_clickhouse_databases +type: clickhouse-list-databases +source: my-clickhouse-instance +description: List all available databases in the ClickHouse instance ``` ## Return Value @@ -49,7 +49,7 @@ Example response: | **field** | **type** | **required** | **description** | |--------------|:------------------:|:------------:|-------------------------------------------------------| -| kind | string | true | Must be "clickhouse-list-databases". | +| type | string | true | Must be "clickhouse-list-databases". | | source | string | true | Name of the ClickHouse source to list databases from. | | description | string | true | Description of the tool that is passed to the LLM. | | authRequired | array of string | false | Authentication services required to use this tool. | diff --git a/docs/en/resources/tools/clickhouse/clickhouse-list-tables.md b/docs/en/resources/tools/clickhouse/clickhouse-list-tables.md index a886813e17..dbeb49c79c 100644 --- a/docs/en/resources/tools/clickhouse/clickhouse-list-tables.md +++ b/docs/en/resources/tools/clickhouse/clickhouse-list-tables.md @@ -21,11 +21,11 @@ user, making it useful for schema exploration and table discovery tasks. 
## Example ```yaml -tools: - list_clickhouse_tables: - kind: clickhouse-list-tables - source: my-clickhouse-instance - description: List all tables in a specific ClickHouse database +kind: tools +name: list_clickhouse_tables +type: clickhouse-list-tables +source: my-clickhouse-instance +description: List all tables in a specific ClickHouse database ``` ## Parameters @@ -56,7 +56,7 @@ Example response: | **field** | **type** | **required** | **description** | |--------------|:------------------:|:------------:|---------------------------------------------------------| -| kind | string | true | Must be "clickhouse-list-tables". | +| type | string | true | Must be "clickhouse-list-tables". | | source | string | true | Name of the ClickHouse source to list tables from. | | description | string | true | Description of the tool that is passed to the LLM. | | authRequired | array of string | false | Authentication services required to use this tool. | diff --git a/docs/en/resources/tools/clickhouse/clickhouse-sql.md b/docs/en/resources/tools/clickhouse/clickhouse-sql.md index 5d66d6826f..b26bb6eade 100644 --- a/docs/en/resources/tools/clickhouse/clickhouse-sql.md +++ b/docs/en/resources/tools/clickhouse/clickhouse-sql.md @@ -21,60 +21,60 @@ query execution capabilities. ## Example ```yaml -tools: - my_analytics_query: - kind: clickhouse-sql - source: my-clickhouse-instance - description: Get user analytics for a specific date range - statement: | - SELECT - user_id, - count(*) as event_count, - max(timestamp) as last_event - FROM events - WHERE date >= ? AND date <= ? - GROUP BY user_id - ORDER BY event_count DESC - LIMIT ? 
- parameters: - - name: start_date - description: Start date for the query (YYYY-MM-DD format) - - name: end_date - description: End date for the query (YYYY-MM-DD format) - - name: limit - description: Maximum number of results to return +kind: tools +name: my_analytics_query +type: clickhouse-sql +source: my-clickhouse-instance +description: Get user analytics for a specific date range +statement: | + SELECT + user_id, + count(*) as event_count, + max(timestamp) as last_event + FROM events + WHERE date >= ? AND date <= ? + GROUP BY user_id + ORDER BY event_count DESC + LIMIT ? +parameters: + - name: start_date + description: Start date for the query (YYYY-MM-DD format) + - name: end_date + description: End date for the query (YYYY-MM-DD format) + - name: limit + description: Maximum number of results to return ``` ## Template Parameters Example ```yaml -tools: - flexible_table_query: - kind: clickhouse-sql - source: my-clickhouse-instance - description: Query any table with flexible columns - statement: | - SELECT {{columns}} - FROM {{table_name}} - WHERE created_date >= ? - LIMIT ? - templateParameters: - - name: columns - description: Comma-separated list of columns to select - - name: table_name - description: Name of the table to query - parameters: - - name: start_date - description: Start date filter - - name: limit - description: Maximum number of results +kind: tools +name: flexible_table_query +type: clickhouse-sql +source: my-clickhouse-instance +description: Query any table with flexible columns +statement: | + SELECT {{columns}} + FROM {{table_name}} + WHERE created_date >= ? + LIMIT ? 
+templateParameters: + - name: columns + description: Comma-separated list of columns to select + - name: table_name + description: Name of the table to query +parameters: + - name: start_date + description: Start date filter + - name: limit + description: Maximum number of results ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:------------------:|:------------:|-------------------------------------------------------| -| kind | string | true | Must be "clickhouse-sql". | +| type | string | true | Must be "clickhouse-sql". | | source | string | true | Name of the ClickHouse source to execute SQL against. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | The SQL statement template to execute. | diff --git a/docs/en/resources/tools/cloudgda/cloud-gda-query.md b/docs/en/resources/tools/cloudgda/cloud-gda-query.md index 39e5bf64ae..d416321424 100644 --- a/docs/en/resources/tools/cloudgda/cloud-gda-query.md +++ b/docs/en/resources/tools/cloudgda/cloud-gda-query.md @@ -18,28 +18,28 @@ The `cloud-gemini-data-analytics-query` tool allows you to send natural language ## Example ```yaml -tools: - my-gda-query-tool: - kind: cloud-gemini-data-analytics-query - source: my-gda-source - description: "Use this tool to send natural language queries to the Gemini Data Analytics API and receive SQL, natural language answers, and explanations." - location: ${your_database_location} - context: - datasourceReferences: - cloudSqlReference: - databaseReference: - projectId: "${your_project_id}" - region: "${your_database_instance_region}" - instanceId: "${your_database_instance_id}" - databaseId: "${your_database_name}" - engine: "POSTGRESQL" - agentContextReference: - contextSetId: "${your_context_set_id}" # E.g. 
projects/${project_id}/locations/${context_set_location}/contextSets/${context_set_id} - generationOptions: - generateQueryResult: true - generateNaturalLanguageAnswer: true - generateExplanation: true - generateDisambiguationQuestion: true +kind: tools +name: my-gda-query-tool +type: cloud-gemini-data-analytics-query +source: my-gda-source +description: "Use this tool to send natural language queries to the Gemini Data Analytics API and receive SQL, natural language answers, and explanations." +location: ${your_database_location} +context: + datasourceReferences: + cloudSqlReference: + databaseReference: + projectId: "${your_project_id}" + region: "${your_database_instance_region}" + instanceId: "${your_database_instance_id}" + databaseId: "${your_database_name}" + engine: "POSTGRESQL" + agentContextReference: + contextSetId: "${your_context_set_id}" # E.g. projects/${project_id}/locations/${context_set_location}/contextSets/${context_set_id} +generationOptions: + generateQueryResult: true + generateNaturalLanguageAnswer: true + generateExplanation: true + generateDisambiguationQuestion: true ``` ### Usage Flow @@ -87,7 +87,7 @@ How many accounts who have region in Prague are eligible for loans? A3 contains | **field** | **type** | **required** | **description** | | ----------------- | :------: | :----------: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| kind | string | true | Must be "cloud-gemini-data-analytics-query". | +| type | string | true | Must be "cloud-gemini-data-analytics-query". | | source | string | true | The name of the `cloud-gemini-data-analytics` source to use. | | description | string | true | A description of the tool's purpose. 
| | location | string | true | The Google Cloud location of the target database resource (e.g., "us-central1"). This is used to construct the parent resource name in the API call. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-fetch-page.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-fetch-page.md index 0e990c9fee..f8cd93cef8 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-fetch-page.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-fetch-page.md @@ -23,18 +23,18 @@ response bundle. ## Example ```yaml -tools: - get_fhir_store: - kind: cloud-healthcare-fhir-fetch-page - source: my-healthcare-source - description: Use this tool to fetch a page of FHIR resources from a FHIR Bundle's entry.link.url +kind: tools +name: get_fhir_store +type: cloud-healthcare-fhir-fetch-page +source: my-healthcare-source +description: Use this tool to fetch a page of FHIR resources from a FHIR Bundle's entry.link.url ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-fhir-fetch-page". | +| type | string | true | Must be "cloud-healthcare-fhir-fetch-page". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-everything.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-everything.md index b66f65f4bc..8f74aa1392 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-everything.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-everything.md @@ -22,18 +22,18 @@ types, or only resources that have been updated after a given time. 
## Example ```yaml -tools: - fhir_patient_everything: - kind: cloud-healthcare-fhir-patient-everything - source: my-healthcare-source - description: Use this tool to retrieve all the information about a given patient. +kind: tools +name: fhir_patient_everything +type: cloud-healthcare-fhir-patient-everything +source: my-healthcare-source +description: Use this tool to retrieve all the information about a given patient. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|-----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-fhir-patient-everything". | +| type | string | true | Must be "cloud-healthcare-fhir-patient-everything". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-search.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-search.md index 73155c90e1..f3cdab733b 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-search.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-fhir-patient-search.md @@ -21,18 +21,18 @@ given criteria. ## Example ```yaml -tools: - fhir_patient_search: - kind: cloud-healthcare-fhir-patient-search - source: my-healthcare-source - description: Use this tool to search for patients in the FHIR store. +kind: tools +name: fhir_patient_search +type: cloud-healthcare-fhir-patient-search +source: my-healthcare-source +description: Use this tool to search for patients in the FHIR store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-fhir-patient-search". 
| +| type | string | true | Must be "cloud-healthcare-fhir-patient-search". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dataset.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dataset.md index 0bcdb6896d..856e327251 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dataset.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dataset.md @@ -21,17 +21,17 @@ configured in the source. It takes no extra parameters. ## Example ```yaml -tools: - get_dataset: - kind: cloud-healthcare-get-dataset - source: my-healthcare-source - description: Use this tool to get healthcare dataset metadata. +kind: tools +name: get_dataset +type: cloud-healthcare-get-dataset +source: my-healthcare-source +description: Use this tool to get healthcare dataset metadata. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-dataset". | +| type | string | true | Must be "cloud-healthcare-get-dataset". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store-metrics.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store-metrics.md index 8bd5d38b1f..8f0b4f152b 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store-metrics.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store-metrics.md @@ -20,18 +20,18 @@ store. 
It's compatible with the following sources: ## Example ```yaml -tools: - get_dicom_store_metrics: - kind: cloud-healthcare-get-dicom-store-metrics - source: my-healthcare-source - description: Use this tool to get metrics for a DICOM store. +kind: tools +name: get_dicom_store_metrics +type: cloud-healthcare-get-dicom-store-metrics +source: my-healthcare-source +description: Use this tool to get metrics for a DICOM store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|-----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-dicom-store-metrics". | +| type | string | true | Must be "cloud-healthcare-get-dicom-store-metrics". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store.md index 9828b06aea..ba44d9b2cc 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-dicom-store.md @@ -20,18 +20,18 @@ compatible with the following sources: ## Example ```yaml -tools: - get_dicom_store: - kind: cloud-healthcare-get-dicom-store - source: my-healthcare-source - description: Use this tool to get information about a DICOM store. +kind: tools +name: get_dicom_store +type: cloud-healthcare-get-dicom-store +source: my-healthcare-source +description: Use this tool to get information about a DICOM store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-dicom-store". | +| type | string | true | Must be "cloud-healthcare-get-dicom-store". 
| | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-resource.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-resource.md index ef093ec1a3..ab36abd08d 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-resource.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-resource.md @@ -23,18 +23,18 @@ by its type and ID. ## Example ```yaml -tools: - get_fhir_resource: - kind: cloud-healthcare-get-fhir-resource - source: my-healthcare-source - description: Use this tool to retrieve a specific FHIR resource. +kind: tools +name: get_fhir_resource +type: cloud-healthcare-get-fhir-resource +source: my-healthcare-source +description: Use this tool to retrieve a specific FHIR resource. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-fhir-resource". | +| type | string | true | Must be "cloud-healthcare-get-fhir-resource". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store-metrics.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store-metrics.md index e5209e6118..f53aafac4e 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store-metrics.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store-metrics.md @@ -20,18 +20,18 @@ compatible with the following sources: ## Example ```yaml -tools: - get_fhir_store_metrics: - kind: cloud-healthcare-get-fhir-store-metrics - source: my-healthcare-source - description: Use this tool to get metrics for a FHIR store. +kind: tools +name: get_fhir_store_metrics +type: cloud-healthcare-get-fhir-store-metrics +source: my-healthcare-source +description: Use this tool to get metrics for a FHIR store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-fhir-store-metrics". | +| type | string | true | Must be "cloud-healthcare-get-fhir-store-metrics". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store.md index a9ce8d4aee..ff08bf3b31 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-get-fhir-store.md @@ -20,18 +20,18 @@ compatible with the following sources: ## Example ```yaml -tools: - get_fhir_store: - kind: cloud-healthcare-get-fhir-store - source: my-healthcare-source - description: Use this tool to get information about a FHIR store. 
+kind: tools +name: get_fhir_store +type: cloud-healthcare-get-fhir-store +source: my-healthcare-source +description: Use this tool to get information about a FHIR store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-get-fhir-store". | +| type | string | true | Must be "cloud-healthcare-get-fhir-store". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-dicom-stores.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-dicom-stores.md index 7a6b493aca..ce1238d632 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-dicom-stores.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-dicom-stores.md @@ -22,17 +22,17 @@ stores in the dataset of the healthcare source. It takes no extra parameters. ## Example ```yaml -tools: - list_dicom_stores: - kind: cloud-healthcare-list-dicom-stores - source: my-healthcare-source - description: Use this tool to list DICOM stores in the healthcare dataset. +kind: tools +name: list_dicom_stores +type: cloud-healthcare-list-dicom-stores +source: my-healthcare-source +description: Use this tool to list DICOM stores in the healthcare dataset. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-list-dicom-stores". | +| type | string | true | Must be "cloud-healthcare-list-dicom-stores". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-fhir-stores.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-fhir-stores.md index ff10278203..12986af45d 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-fhir-stores.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-list-fhir-stores.md @@ -22,17 +22,17 @@ stores in the dataset of the healthcare source. It takes no extra parameters. ## Example ```yaml -tools: - list_fhir_stores: - kind: cloud-healthcare-list-fhir-stores - source: my-healthcare-source - description: Use this tool to list FHIR stores in the healthcare dataset. +kind: tools +name: list_fhir_stores +type: cloud-healthcare-list-fhir-stores +source: my-healthcare-source +description: Use this tool to list FHIR stores in the healthcare dataset. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-list-fhir-stores". | +| type | string | true | Must be "cloud-healthcare-list-fhir-stores". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-retrieve-rendered-dicom-instance.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-retrieve-rendered-dicom-instance.md index c1b99818ef..1d8eea9f29 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-retrieve-rendered-dicom-instance.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-retrieve-rendered-dicom-instance.md @@ -22,18 +22,18 @@ string of the image in JPEG format. 
## Example ```yaml -tools: - retrieve_rendered_dicom_instance: - kind: cloud-healthcare-retrieve-rendered-dicom-instance - source: my-healthcare-source - description: Use this tool to retrieve a rendered DICOM instance from the DICOM store. +kind: tools +name: retrieve_rendered_dicom_instance +type: cloud-healthcare-retrieve-rendered-dicom-instance +source: my-healthcare-source +description: Use this tool to retrieve a rendered DICOM instance from the DICOM store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|--------------------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-retrieve-rendered-dicom-instance". | +| type | string | true | Must be "cloud-healthcare-retrieve-rendered-dicom-instance". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-instances.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-instances.md index 042253d91d..554290fe37 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-instances.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-instances.md @@ -22,18 +22,18 @@ criteria. ## Example ```yaml -tools: - search_dicom_instances: - kind: cloud-healthcare-search-dicom-instances - source: my-healthcare-source - description: Use this tool to search for DICOM instances in the DICOM store. +kind: tools +name: search_dicom_instances +type: cloud-healthcare-search-dicom-instances +source: my-healthcare-source +description: Use this tool to search for DICOM instances in the DICOM store. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-search-dicom-instances". | +| type | string | true | Must be "cloud-healthcare-search-dicom-instances". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-series.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-series.md index 1a8e76a183..2dba31f3cf 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-series.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-series.md @@ -20,18 +20,18 @@ set of criteria. It's compatible with the following sources: ## Example ```yaml -tools: - search_dicom_series: - kind: cloud-healthcare-search-dicom-series - source: my-healthcare-source - description: Use this tool to search for DICOM series in the DICOM store. +kind: tools +name: search_dicom_series +type: cloud-healthcare-search-dicom-series +source: my-healthcare-source +description: Use this tool to search for DICOM series in the DICOM store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-search-dicom-series". | +| type | string | true | Must be "cloud-healthcare-search-dicom-series". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-studies.md b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-studies.md index 11d9036292..a3c8504444 100644 --- a/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-studies.md +++ b/docs/en/resources/tools/cloudhealthcare/cloud-healthcare-search-dicom-studies.md @@ -20,18 +20,18 @@ set of criteria. It's compatible with the following sources: ## Example ```yaml -tools: - search_dicom_studies: - kind: cloud-healthcare-search-dicom-studies - source: my-healthcare-source - description: Use this tool to search for DICOM studies in the DICOM store. +kind: tools +name: search_dicom_studies +type: cloud-healthcare-search-dicom-studies +source: my-healthcare-source +description: Use this tool to search for DICOM studies in the DICOM store. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "cloud-healthcare-search-dicom-studies". | +| type | string | true | Must be "cloud-healthcare-search-dicom-studies". | | source | string | true | Name of the healthcare source. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/cloudloggingadmin/_index.md b/docs/en/resources/tools/cloudloggingadmin/_index.md new file mode 100644 index 0000000000..a5b34e9a76 --- /dev/null +++ b/docs/en/resources/tools/cloudloggingadmin/_index.md @@ -0,0 +1,8 @@ +--- +title: "Cloud Logging Admin" +linkTitle: "Cloud Logging Admin" +type: docs +weight: 1 +description: > + Tools that work with Cloud Logging Admin Sources. 
+--- diff --git a/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md new file mode 100644 index 0000000000..17e30d050d --- /dev/null +++ b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md @@ -0,0 +1,39 @@ +--- +title: "cloud-logging-admin-list-log-names" +type: docs +description: > + A "cloud-logging-admin-list-log-names" tool lists the log names in the project. +aliases: +- /resources/tools/cloud-logging-admin-list-log-names +--- + +## About + +The `cloud-logging-admin-list-log-names` tool lists the log names available in the Google Cloud project. +It's compatible with the following sources: + +- [cloud-logging-admin](../../sources/cloud-logging-admin.md) + +## Example + +```yaml +kind: tools +name: list_log_names +type: cloud-logging-admin-list-log-names +source: my-cloud-logging +description: Lists all log names in the project. +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|----------------------------------------------------| +| type | string | true | Must be "cloud-logging-admin-list-log-names". | +| source | string | true | Name of the cloud-logging-admin source. | +| description | string | true | Description of the tool that is passed to the LLM. | + +### Parameters + +| **parameter** | **type** | **required** | **description** | +|:--------------|:--------:|:------------:|:----------------| +| limit | integer | false | Maximum number of log entries to return (default: 200). 
| diff --git a/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md new file mode 100644 index 0000000000..1bbac35c4b --- /dev/null +++ b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md @@ -0,0 +1,34 @@ +--- +title: "cloud-logging-admin-list-resource-types" +type: docs +description: > + A "cloud-logging-admin-list-resource-types" tool lists the monitored resource types. +aliases: +- /resources/tools/cloud-logging-admin-list-resource-types +--- + +## About + +The `cloud-logging-admin-list-resource-types` tool lists the monitored resource types available in Google Cloud Logging. +It's compatible with the following sources: + +- [cloud-logging-admin](../../sources/cloud-logging-admin.md) + +## Example + +```yaml +kind: tools +name: list_resource_types +type: cloud-logging-admin-list-resource-types +source: my-cloud-logging +description: Lists monitored resource types. +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|----------------------------------------------------| +| type | string | true | Must be "cloud-logging-admin-list-resource-types".| +| source | string | true | Name of the cloud-logging-admin source. | +| description | string | true | Description of the tool that is passed to the LLM. | + diff --git a/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-query-logs.md b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-query-logs.md new file mode 100644 index 0000000000..7a9b8f97d5 --- /dev/null +++ b/docs/en/resources/tools/cloudloggingadmin/cloud-logging-admin-query-logs.md @@ -0,0 +1,44 @@ +--- +title: "cloud-logging-admin-query-logs" +type: docs +description: > + A "cloud-logging-admin-query-logs" tool queries log entries. 
+aliases: +- /resources/tools/cloud-logging-admin-query-logs +--- + +## About + +The `cloud-logging-admin-query-logs` tool allows you to query log entries from Google Cloud Logging using the advanced logs filter syntax. +It's compatible with the following sources: + +- [cloud-logging-admin](../../sources/cloud-logging-admin.md) + +## Example + +```yaml +kind: tools +name: query_logs +type: cloud-logging-admin-query-logs +source: my-cloud-logging +description: Queries log entries from Cloud Logging. +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|----------------------------------------------------| +| type | string | true | Must be "cloud-logging-admin-query-logs". | +| source | string | true | Name of the cloud-logging-admin source. | +| description | string | true | Description of the tool that is passed to the LLM. | + +### Parameters + +| **parameter** | **type** | **required** | **description** | +|:--------------|:--------:|:------------:|:----------------| +| filter | string | false | Cloud Logging filter query. Common fields: resource.type, resource.labels.*, logName, severity, textPayload, jsonPayload.*, protoPayload.*, labels.*, httpRequest.*. Operators: =, !=, <, <=, >, >=, :, =~, AND, OR, NOT. | +| newestFirst | boolean | false | Set to true for newest logs first. Defaults to oldest first. | +| startTime | string | false | Start time in RFC3339 format (e.g., 2025-12-09T00:00:00Z). Defaults to 30 days ago. | +| endTime | string | false | End time in RFC3339 format (e.g., 2025-12-09T23:59:59Z). Defaults to now. | +| verbose | boolean | false | Include additional fields (insertId, trace, spanId, httpRequest, labels, operation, sourceLocation). Defaults to false. | +| limit | integer | false | Maximum number of log entries to return. Default: `200`. 
| diff --git a/docs/en/resources/tools/cloudmonitoring/cloud-monitoring-query-prometheus.md b/docs/en/resources/tools/cloudmonitoring/cloud-monitoring-query-prometheus.md index 53ade0eb33..8597c1dc1a 100644 --- a/docs/en/resources/tools/cloudmonitoring/cloud-monitoring-query-prometheus.md +++ b/docs/en/resources/tools/cloudmonitoring/cloud-monitoring-query-prometheus.md @@ -55,21 +55,21 @@ Here are some examples of how to use the `cloud-monitoring-query-prometheus` tool. ```yaml -tools: - get_wait_time_metrics: - kind: cloud-monitoring-query-prometheus - source: cloud-monitoring-source - description: | - This tool fetches system wait time information for AlloyDB cluster, instance. Get the `projectID`, `clusterID` and `instanceID` from the user intent. To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`. - Generate `query` using these metric details: - metric: `alloydb.googleapis.com/instance/postgresql/wait_time`, monitored_resource: `alloydb.googleapis.com/Instance`. labels: `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`. - Basic time series example promql query: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/postgresql/wait_time","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])` +kind: tools +name: get_wait_time_metrics +type: cloud-monitoring-query-prometheus +source: cloud-monitoring-source +description: | + This tool fetches system wait time information for AlloyDB cluster, instance. Get the `projectID`, `clusterID` and `instanceID` from the user intent. To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`. + Generate `query` using these metric details: + metric: `alloydb.googleapis.com/instance/postgresql/wait_time`, monitored_resource: `alloydb.googleapis.com/Instance`. labels: `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`. 
+ Basic time series example promql query: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/postgresql/wait_time","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])` ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be cloud-monitoring-query-prometheus. | +| type | string | true | Must be cloud-monitoring-query-prometheus. | | source | string | true | The name of an `cloud-monitoring` source. | | description | string | true | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlcloneinstance.md b/docs/en/resources/tools/cloudsql/cloudsqlcloneinstance.md index 89fdb8d986..455381173d 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlcloneinstance.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlcloneinstance.md @@ -8,7 +8,7 @@ description: "Clone a Cloud SQL instance." The `cloud-sql-clone-instance` tool clones a Cloud SQL instance using the Cloud SQL Admin API. {{< notice info dd>}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Examples @@ -16,21 +16,21 @@ This tool uses a `source` of kind `cloud-sql-admin`. Basic clone (current state) ```yaml -tools: - clone-instance-basic: - kind: cloud-sql-clone-instance - source: cloud-sql-admin-source - description: "Creates an exact copy of a Cloud SQL instance. Supports configuring instance zones and high-availability setup through zone preferences." +kind: tools +name: clone-instance-basic +type: cloud-sql-clone-instance +source: cloud-sql-admin-source +description: "Creates an exact copy of a Cloud SQL instance. Supports configuring instance zones and high-availability setup through zone preferences." 
``` Point-in-time recovery (PITR) clone ```yaml -tools: - clone-instance-pitr: - kind: cloud-sql-clone-instance - source: cloud-sql-admin-source - description: "Creates an exact copy of a Cloud SQL instance at a specific point in time (PITR). Supports configuring instance zones and high-availability setup through zone preferences" +kind: tools +name: clone-instance-pitr +type: cloud-sql-clone-instance +source: cloud-sql-admin-source +description: "Creates an exact copy of a Cloud SQL instance at a specific point in time (PITR). Supports configuring instance zones and high-availability setup through zone preferences" ``` ## Reference @@ -39,7 +39,7 @@ tools: | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ------------------------------------------------------------- | -| kind | string | true | Must be "cloud-sql-clone-instance". | +| type | string | true | Must be "cloud-sql-clone-instance". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md b/docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md index 751534a0ba..b767389644 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md @@ -8,7 +8,7 @@ description: "Creates a backup on a Cloud SQL instance." The `cloud-sql-create-backup` tool creates an on-demand backup on a Cloud SQL instance using the Cloud SQL Admin API. {{< notice info dd>}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Examples @@ -16,17 +16,17 @@ This tool uses a `source` of kind `cloud-sql-admin`. 
Basic backup creation (current state) ```yaml -tools: - backup-creation-basic: - kind: cloud-sql-create-backup - source: cloud-sql-admin-source - description: "Creates a backup on the given Cloud SQL instance." +kind: tools +name: backup-creation-basic +type: cloud-sql-create-backup +source: cloud-sql-admin-source +description: "Creates a backup on the given Cloud SQL instance." ``` ## Reference ### Tool Configuration | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ------------------------------------------------------------- | -| kind | string | true | Must be "cloud-sql-create-backup". | +| type | string | true | Must be "cloud-sql-create-backup". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlcreatedatabase.md b/docs/en/resources/tools/cloudsql/cloudsqlcreatedatabase.md index dbad4f3376..96c41047d8 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlcreatedatabase.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlcreatedatabase.md @@ -10,24 +10,24 @@ The `cloud-sql-create-database` tool creates a new database in a specified Cloud SQL instance. {{< notice info >}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Example ```yaml -tools: - create-cloud-sql-database: - kind: cloud-sql-create-database - source: my-cloud-sql-admin-source - description: "Creates a new database in a Cloud SQL instance." +kind: tools +name: create-cloud-sql-database +type: cloud-sql-create-database +source: my-cloud-sql-admin-source +description: "Creates a new database in a Cloud SQL instance." 
``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-create-database". | +| type | string | true | Must be "cloud-sql-create-database". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlcreateusers.md b/docs/en/resources/tools/cloudsql/cloudsqlcreateusers.md index affee91cfc..243e812100 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlcreateusers.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlcreateusers.md @@ -10,23 +10,23 @@ The `cloud-sql-create-users` tool creates a new user in a specified Cloud SQL instance. It can create both built-in and IAM users. {{< notice info >}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Example ```yaml -tools: - create-cloud-sql-user: - kind: cloud-sql-create-users - source: my-cloud-sql-admin-source - description: "Creates a new user in a Cloud SQL instance. Both built-in and IAM users are supported. IAM users require an email account as the user name. IAM is the more secure and recommended way to manage users. The agent should always ask the user what type of user they want to create. For more information, see https://cloud.google.com/sql/docs/postgres/add-manage-iam-users" +kind: tools +name: create-cloud-sql-user +type: cloud-sql-create-users +source: my-cloud-sql-admin-source +description: "Creates a new user in a Cloud SQL instance. Both built-in and IAM users are supported. IAM users require an email account as the user name. IAM is the more secure and recommended way to manage users. The agent should always ask the user what type of user they want to create. 
For more information, see https://cloud.google.com/sql/docs/postgres/add-manage-iam-users" ``` ## Reference | **field** | **type** | **required** | **description** | | ------------ | :-------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-create-users". | +| type | string | true | Must be "cloud-sql-create-users". | | description | string | false | A description of the tool. | | source | string | true | The name of the `cloud-sql-admin` source to use. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlgetinstances.md b/docs/en/resources/tools/cloudsql/cloudsqlgetinstances.md index 711fc3b333..d6ab497995 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlgetinstances.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlgetinstances.md @@ -10,23 +10,23 @@ The `cloud-sql-get-instance` tool retrieves a Cloud SQL instance resource using the Cloud SQL Admin API. {{< notice info >}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Example ```yaml -tools: - get-sql-instance: - kind: cloud-sql-get-instance - source: my-cloud-sql-admin-source - description: "Gets a particular cloud sql instance." +kind: tools +name: get-sql-instance +type: cloud-sql-get-instance +source: my-cloud-sql-admin-source +description: "Gets a particular Cloud SQL instance." ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-get-instance". | +| type | string | true | Must be "cloud-sql-get-instance". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool.
| diff --git a/docs/en/resources/tools/cloudsql/cloudsqllistdatabases.md b/docs/en/resources/tools/cloudsql/cloudsqllistdatabases.md index 54f8f3401d..2d86f5b23e 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqllistdatabases.md +++ b/docs/en/resources/tools/cloudsql/cloudsqllistdatabases.md @@ -18,15 +18,15 @@ Here is an example of how to configure the `cloud-sql-list-databases` tool in yo `tools.yaml` file: ```yaml -sources: - my-cloud-sql-admin-source: - kind: cloud-sql-admin - -tools: - list_my_databases: - kind: cloud-sql-list-databases - source: my-cloud-sql-admin-source - description: Use this tool to list all Cloud SQL databases in an instance. +kind: sources +name: my-cloud-sql-admin-source +type: cloud-sql-admin +--- +kind: tools +name: list_my_databases +type: cloud-sql-list-databases +source: my-cloud-sql-admin-source +description: Use this tool to list all Cloud SQL databases in an instance. ``` ## Parameters @@ -42,6 +42,6 @@ The `cloud-sql-list-databases` tool has two required parameters: | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | -------------------------------------------------------------- | -| kind | string | true | Must be "cloud-sql-list-databases". | +| type | string | true | Must be "cloud-sql-list-databases". | | source | string | true | The name of the `cloud-sql-admin` source to use for this tool. | | description | string | false | Description of the tool that is passed to the agent. 
| diff --git a/docs/en/resources/tools/cloudsql/cloudsqllistinstances.md b/docs/en/resources/tools/cloudsql/cloudsqllistinstances.md index 45e3c542e6..c078aa43bd 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqllistinstances.md +++ b/docs/en/resources/tools/cloudsql/cloudsqllistinstances.md @@ -19,15 +19,15 @@ Here is an example of how to configure the `cloud-sql-list-instances` tool in your `tools.yaml` file: ```yaml -sources: - my-cloud-sql-admin-source: - kind: cloud-sql-admin - -tools: - list_my_instances: - kind: cloud-sql-list-instances - source: my-cloud-sql-admin-source - description: Use this tool to list all Cloud SQL instances in a project. +kind: sources +name: my-cloud-sql-admin-source +type: cloud-sql-admin +--- +kind: tools +name: list_my_instances +type: cloud-sql-list-instances +source: my-cloud-sql-admin-source +description: Use this tool to list all Cloud SQL instances in a project. ``` ## Parameters @@ -42,6 +42,6 @@ The `cloud-sql-list-instances` tool has one required parameter: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------------------| -| kind | string | true | Must be "cloud-sql-list-instances". | +| type | string | true | Must be "cloud-sql-list-instances". | | description | string | false | Description of the tool that is passed to the agent. | | source | string | true | The name of the `cloud-sql-admin` source to use for this tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlmssqlcreateinstance.md b/docs/en/resources/tools/cloudsql/cloudsqlmssqlcreateinstance.md index 6b7cd9beb9..2053724237 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlmssqlcreateinstance.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlmssqlcreateinstance.md @@ -9,17 +9,17 @@ The `cloud-sql-mssql-create-instance` tool creates a Cloud SQL for SQL Server instance using the Cloud SQL Admin API. 
{{< notice info dd>}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Example ```yaml -tools: - create-sql-instance: - kind: cloud-sql-mssql-create-instance - source: cloud-sql-admin-source - description: "Creates a SQL Server instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 8 GiB RAM (`db-custom-2-8192`) configuration with Non-HA/zonal availability. For the `Production` template, it chooses a 4 vCPU, 26 GiB RAM (`db-custom-4-26624`) configuration with HA/regional availability. The Enterprise edition is used in both cases. The default database version is `SQLSERVER_2022_STANDARD`. The agent should ask the user if they want to use a different version." +kind: tools +name: create-sql-instance +type: cloud-sql-mssql-create-instance +source: cloud-sql-admin-source +description: "Creates a SQL Server instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 8 GiB RAM (`db-custom-2-8192`) configuration with Non-HA/zonal availability. For the `Production` template, it chooses a 4 vCPU, 26 GiB RAM (`db-custom-4-26624`) configuration with HA/regional availability. The Enterprise edition is used in both cases. The default database version is `SQLSERVER_2022_STANDARD`. The agent should ask the user if they want to use a different version." ``` ## Reference @@ -28,7 +28,7 @@ tools: | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-mssql-create-instance". | +| type | string | true | Must be "cloud-sql-mssql-create-instance". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. 
| diff --git a/docs/en/resources/tools/cloudsql/cloudsqlmysqlcreateinstance.md b/docs/en/resources/tools/cloudsql/cloudsqlmysqlcreateinstance.md index 8707b8fea2..d9e5a7dcee 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlmysqlcreateinstance.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlmysqlcreateinstance.md @@ -18,15 +18,15 @@ Here is an example of how to configure the `cloud-sql-mysql-create-instance` tool in your `tools.yaml` file: ```yaml -sources: - my-cloud-sql-admin-source: - kind: cloud-sql-admin - -tools: - create_my_mysql_instance: - kind: cloud-sql-mysql-create-instance - source: my-cloud-sql-admin-source - description: "Creates a MySQL instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 16 GiB RAM, 100 GiB SSD configuration with Non-HA/zonal availability. For the `Production` template, it chooses an 8 vCPU, 64 GiB RAM, 250 GiB SSD configuration with HA/regional availability. The Enterprise Plus edition is used in both cases. The default database version is `MYSQL_8_4`. The agent should ask the user if they want to use a different version." +kind: sources +name: my-cloud-sql-admin-source +type: cloud-sql-admin +--- +kind: tools +name: create_my_mysql_instance +type: cloud-sql-mysql-create-instance +source: my-cloud-sql-admin-source +description: "Creates a MySQL instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 16 GiB RAM, 100 GiB SSD configuration with Non-HA/zonal availability. For the `Production` template, it chooses an 8 vCPU, 64 GiB RAM, 250 GiB SSD configuration with HA/regional availability. The Enterprise Plus edition is used in both cases. The default database version is `MYSQL_8_4`. The agent should ask the user if they want to use a different version." 
``` ## Parameters @@ -45,6 +45,6 @@ The `cloud-sql-mysql-create-instance` tool has the following parameters: | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | -------------------------------------------------------------- | -| kind | string | true | Must be `cloud-sql-mysql-create-instance`. | +| type | string | true | Must be `cloud-sql-mysql-create-instance`. | | source | string | true | The name of the `cloud-sql-admin` source to use for this tool. | | description | string | false | A description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlpgcreateinstances.md b/docs/en/resources/tools/cloudsql/cloudsqlpgcreateinstances.md index b567537592..0d41a89e03 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlpgcreateinstances.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlpgcreateinstances.md @@ -9,17 +9,17 @@ The `cloud-sql-postgres-create-instance` tool creates a Cloud SQL for PostgreSQL instance using the Cloud SQL Admin API. {{< notice info >}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Example ```yaml -tools: - create-sql-instance: - kind: cloud-sql-postgres-create-instance - source: cloud-sql-admin-source - description: "Creates a Postgres instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 16 GiB RAM, 100 GiB SSD configuration with Non-HA/zonal availability. For the `Production` template, it chooses an 8 vCPU, 64 GiB RAM, 250 GiB SSD configuration with HA/regional availability. The Enterprise Plus edition is used in both cases. The default database version is `POSTGRES_17`. The agent should ask the user if they want to use a different version." 
+kind: tools +name: create-sql-instance +type: cloud-sql-postgres-create-instance +source: cloud-sql-admin-source +description: "Creates a Postgres instance using `Production` and `Development` presets. For the `Development` template, it chooses a 2 vCPU, 16 GiB RAM, 100 GiB SSD configuration with Non-HA/zonal availability. For the `Production` template, it chooses an 8 vCPU, 64 GiB RAM, 250 GiB SSD configuration with HA/regional availability. The Enterprise Plus edition is used in both cases. The default database version is `POSTGRES_17`. The agent should ask the user if they want to use a different version." ``` ## Reference @@ -28,7 +28,7 @@ tools: | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-postgres-create-instance". | +| type | string | true | Must be "cloud-sql-postgres-create-instance". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlpgupgradeprecheck.md b/docs/en/resources/tools/cloudsql/cloudsqlpgupgradeprecheck.md index 4b5933a176..820bf44031 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlpgupgradeprecheck.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlpgupgradeprecheck.md @@ -10,7 +10,7 @@ instance to assess its readiness for a major version upgrade using the Cloud SQL It helps identify potential incompatibilities or issues before starting the actual upgrade process. {{< notice info >}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Tool Inputs @@ -18,18 +18,18 @@ This tool uses a `source` of kind `cloud-sql-admin`. 
### Example ```yaml -tools: - postgres-upgrade-precheck: - kind: postgres-upgrade-precheck - source: cloud-sql-admin-source - description: "Checks if a Cloud SQL PostgreSQL instance is ready for a major version upgrade to the specified target version." +kind: tools +name: postgres-upgrade-precheck +type: postgres-upgrade-precheck +source: cloud-sql-admin-source +description: "Checks if a Cloud SQL PostgreSQL instance is ready for a major version upgrade to the specified target version." ``` ### Reference | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | --------------------------------------------------------- | -| kind | string | true | Must be "postgres-upgrade-precheck". | +| type | string | true | Must be "postgres-upgrade-precheck". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md b/docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md index 3eabdac865..12e45a2c66 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md @@ -8,7 +8,7 @@ description: "Restores a backup of a Cloud SQL instance." The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API. {{< notice info dd>}} -This tool uses a `source` of kind `cloud-sql-admin`. +This tool uses a `source` of type `cloud-sql-admin`. {{< /notice >}} ## Examples @@ -16,11 +16,11 @@ This tool uses a `source` of kind `cloud-sql-admin`. Basic backup restore ```yaml -tools: - backup-restore-basic: - kind: cloud-sql-restore-backup - source: cloud-sql-admin-source - description: "Restores a backup onto the given Cloud SQL instance." 
+kind: tools +name: backup-restore-basic +type: cloud-sql-restore-backup +source: cloud-sql-admin-source +description: "Restores a backup onto the given Cloud SQL instance." ``` ## Reference @@ -28,7 +28,7 @@ tools: ### Tool Configuration | **field** | **type** | **required** | **description** | | -------------- | :------: | :----------: | ------------------------------------------------ | -| kind | string | true | Must be "cloud-sql-restore-backup". | +| type | string | true | Must be "cloud-sql-restore-backup". | | source | string | true | The name of the `cloud-sql-admin` source to use. | | description | string | false | A description of the tool. | diff --git a/docs/en/resources/tools/cloudsql/cloudsqlwaitforoperation.md b/docs/en/resources/tools/cloudsql/cloudsqlwaitforoperation.md index 3816d12bf3..728d0e2bc8 100644 --- a/docs/en/resources/tools/cloudsql/cloudsqlwaitforoperation.md +++ b/docs/en/resources/tools/cloudsql/cloudsqlwaitforoperation.md @@ -14,22 +14,22 @@ exponential backoff. ## Example ```yaml -tools: - cloudsql-operations-get: - kind: cloud-sql-wait-for-operation - source: my-cloud-sql-source - description: "This will poll on operations API until the operation is done. For checking operation status we need projectId and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup." - delay: 1s - maxDelay: 4m - multiplier: 2 - maxRetries: 10 +kind: tools +name: cloudsql-operations-get +type: cloud-sql-wait-for-operation +source: my-cloud-sql-source +description: "This will poll on operations API until the operation is done. For checking operation status we need projectId and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup." 
+delay: 1s +maxDelay: 4m +multiplier: 2 +maxRetries: 10 ``` ## Reference | **field** | **type** | **required** | **description** | | ----------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "cloud-sql-wait-for-operation". | +| type | string | true | Must be "cloud-sql-wait-for-operation". | | source | string | true | The name of a `cloud-sql-admin` source to use for authentication. | | description | string | false | A description of the tool. | | delay | duration | false | The initial delay between polling requests (e.g., `3s`). Defaults to 3 seconds. | diff --git a/docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md b/docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md new file mode 100644 index 0000000000..10a78926f4 --- /dev/null +++ b/docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md @@ -0,0 +1,273 @@ +--- +title: "cockroachdb-execute-sql" +type: docs +weight: 1 +description: > + Execute ad-hoc SQL statements against a CockroachDB database. + +--- + +## About + +A `cockroachdb-execute-sql` tool executes ad-hoc SQL statements against a CockroachDB database. This tool is designed for interactive workflows where the SQL query is provided dynamically at runtime, making it ideal for developer assistance and exploratory data analysis. + +The tool takes a single `sql` parameter containing the SQL statement to execute and returns the query results. + +> **Note:** This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents. For production use cases with predefined queries, use [cockroachdb-sql](./cockroachdb-sql.md) instead. 
+ +## Example + +```yaml +sources: + my_cockroachdb: + type: cockroachdb + host: your-cluster.cockroachlabs.cloud + port: "26257" + user: myuser + password: mypassword + database: defaultdb + queryParams: + sslmode: require + +tools: + execute_sql: + type: cockroachdb-execute-sql + source: my_cockroachdb + description: Execute any SQL statement against the CockroachDB database +``` + +## Usage Examples + +### Simple SELECT Query + +```json +{ + "sql": "SELECT * FROM users LIMIT 10" +} +``` + +### Query with Aggregations + +```json +{ + "sql": "SELECT category, COUNT(*) as count, SUM(amount) as total FROM expenses GROUP BY category ORDER BY total DESC" +} +``` + +### Database Introspection + +```json +{ + "sql": "SHOW TABLES" +} +``` + +```json +{ + "sql": "SHOW COLUMNS FROM expenses" +} +``` + +### Multi-Region Information + +```json +{ + "sql": "SHOW REGIONS FROM DATABASE defaultdb" +} +``` + +```json +{ + "sql": "SHOW ZONE CONFIGURATIONS" +} +``` + +## CockroachDB-Specific Features + +### Check Cluster Version + +```json +{ + "sql": "SELECT version()" +} +``` + +### View Node Status + +```json +{ + "sql": "SELECT node_id, address, locality, is_live FROM crdb_internal.gossip_nodes" +} +``` + +### Check Replication Status + +```json +{ + "sql": "SELECT range_id, start_key, end_key, replicas, lease_holder FROM crdb_internal.ranges LIMIT 10" +} +``` + +### View Table Regions + +```json +{ + "sql": "SHOW REGIONS FROM TABLE expenses" +} +``` + +## Configuration + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `type` | string | Must be `cockroachdb-execute-sql` | +| `source` | string | Name of the CockroachDB source to use | +| `description` | string | Human-readable description for the LLM | + +### Optional Fields + +| Field | Type | Description | +|-------|------|-------------| +| `authRequired` | array | List of authentication services required | + +## Parameters + +The tool accepts a single runtime parameter: + +| Parameter 
| Type | Description | +|-----------|------|-------------| +| `sql` | string | The SQL statement to execute | + +## Best Practices + +### Use for Exploration, Not Production + +This tool is ideal for: +- Interactive database exploration +- Ad-hoc analysis and reporting +- Debugging and troubleshooting +- Schema inspection + +For production use cases, use [cockroachdb-sql](./cockroachdb-sql.md) with parameterized queries. + +### Be Cautious with Data Modification + +While this tool can execute any SQL statement, be careful with: +- `INSERT`, `UPDATE`, `DELETE` statements +- `DROP` or `ALTER` statements +- Schema changes in production + +### Use LIMIT for Large Results + +Always use `LIMIT` clauses when exploring data: + +```sql +SELECT * FROM large_table LIMIT 100 +``` + +### Leverage CockroachDB's SQL Extensions + +CockroachDB supports PostgreSQL syntax plus extensions: + +```sql +-- Show database survival goal +SHOW SURVIVAL GOAL FROM DATABASE defaultdb; + +-- View zone configurations +SHOW ZONE CONFIGURATION FOR TABLE expenses; + +-- Check table localities +SHOW CREATE TABLE expenses; +``` + +## Error Handling + +The tool will return descriptive errors for: +- **Syntax errors**: Invalid SQL syntax +- **Permission errors**: Insufficient user privileges +- **Connection errors**: Network or authentication issues +- **Runtime errors**: Constraint violations, type mismatches, etc. + +## Security Considerations + +### SQL Injection Risk + +Since this tool executes arbitrary SQL, it should only be used with: +- Trusted users in interactive sessions +- Human-in-the-loop workflows +- Development and testing environments + +Never expose this tool directly to end users without proper authorization controls. 
+ +### Use Authentication + +Configure the `authRequired` field to restrict access: + +```yaml +tools: + execute_sql: + type: cockroachdb-execute-sql + source: my_cockroachdb + description: Execute SQL statements + authRequired: + - my-auth-service +``` + +### Read-Only Users + +For safer exploration, create read-only database users: + +```sql +CREATE USER readonly_user; +GRANT SELECT ON DATABASE defaultdb TO readonly_user; +``` + +## Common Use Cases + +### Database Administration + +```sql +-- View database size +SELECT + table_name, + pg_size_pretty(pg_total_relation_size(table_name::regclass)) AS size +FROM information_schema.tables +WHERE table_schema = 'public' +ORDER BY pg_total_relation_size(table_name::regclass) DESC; +``` + +### Performance Analysis + +```sql +-- Find slow queries +SELECT query, count, mean_latency +FROM crdb_internal.statement_statistics +WHERE mean_latency > INTERVAL '1 second' +ORDER BY mean_latency DESC +LIMIT 10; +``` + +### Data Quality Checks + +```sql +-- Find NULL values +SELECT COUNT(*) as null_count +FROM expenses +WHERE description IS NULL OR amount IS NULL; + +-- Find duplicates +SELECT user_id, email, COUNT(*) as count +FROM users +GROUP BY user_id, email +HAVING COUNT(*) > 1; +``` + +## See Also + +- [cockroachdb-sql](./cockroachdb-sql.md) - For parameterized, production-ready queries +- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database +- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas +- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference +- [CockroachDB SQL Reference](https://www.cockroachlabs.com/docs/stable/sql-statements.html) - Official SQL documentation diff --git a/docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md b/docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md new file mode 100644 index 0000000000..8a9ee11292 --- /dev/null +++ 
b/docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md @@ -0,0 +1,305 @@ +--- +title: "cockroachdb-list-schemas" +type: docs +weight: 1 +description: > + List schemas in a CockroachDB database. + +--- + +## About + +The `cockroachdb-list-schemas` tool retrieves a list of schemas (namespaces) in a CockroachDB database. Schemas are used to organize database objects such as tables, views, and functions into logical groups. + +This tool is useful for: +- Understanding database organization +- Discovering available schemas +- Multi-tenant application analysis +- Schema-level access control planning + +## Example + +```yaml +sources: + my_cockroachdb: + type: cockroachdb + host: your-cluster.cockroachlabs.cloud + port: "26257" + user: myuser + password: mypassword + database: defaultdb + queryParams: + sslmode: require + +tools: + list_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: List all schemas in the database +``` + +## Configuration + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `type` | string | Must be `cockroachdb-list-schemas` | +| `source` | string | Name of the CockroachDB source to use | +| `description` | string | Human-readable description for the LLM | + +### Optional Fields + +| Field | Type | Description | +|-------|------|-------------| +| `authRequired` | array | List of authentication services required | + +## Output Structure + +The tool returns a list of schemas with the following information: + +```json +[ + { + "catalog_name": "defaultdb", + "schema_name": "public", + "is_user_defined": true + }, + { + "catalog_name": "defaultdb", + "schema_name": "analytics", + "is_user_defined": true + } +] +``` + +### Fields + +| Field | Type | Description | +|-------|------|-------------| +| `catalog_name` | string | The database (catalog) name | +| `schema_name` | string | The schema name | +| `is_user_defined` | boolean | Whether this is a user-created schema 
(excludes system schemas) | + +## Usage Example + +```json +{} +``` + +No parameters are required. The tool automatically lists all user-defined schemas. + +## Default Schemas + +CockroachDB includes several standard schemas: + +- **`public`**: The default schema for user objects +- **`pg_catalog`**: PostgreSQL system catalog (excluded from results) +- **`information_schema`**: SQL standard metadata views (excluded from results) +- **`crdb_internal`**: CockroachDB internal metadata (excluded from results) +- **`pg_extension`**: PostgreSQL extension objects (excluded from results) + +The tool filters out system schemas and only returns user-defined schemas. + +## Schema Management in CockroachDB + +### Creating Schemas + +```sql +CREATE SCHEMA analytics; +``` + +### Using Schemas + +```sql +-- Create table in specific schema +CREATE TABLE analytics.revenue ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + amount DECIMAL(10,2), + date DATE +); + +-- Query from specific schema +SELECT * FROM analytics.revenue; +``` + +### Schema Search Path + +The search path determines which schemas are searched for unqualified object names: + +```sql +-- Show current search path +SHOW search_path; + +-- Set search path +SET search_path = analytics, public; +``` + +## Multi-Tenant Applications + +Schemas are commonly used for multi-tenant applications: + +```sql +-- Create schema per tenant +CREATE SCHEMA tenant_acme; +CREATE SCHEMA tenant_globex; + +-- Create same table structure in each schema +CREATE TABLE tenant_acme.orders (...); +CREATE TABLE tenant_globex.orders (...); +``` + +The `cockroachdb-list-schemas` tool helps discover all tenant schemas: + +```yaml +tools: + list_tenants: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: | + List all tenant schemas in the database. + Each schema represents a separate tenant's data namespace. 
+``` + +## Best Practices + +### Use Schemas for Organization + +Group related tables into schemas: + +```sql +CREATE SCHEMA sales; +CREATE SCHEMA inventory; +CREATE SCHEMA hr; + +CREATE TABLE sales.orders (...); +CREATE TABLE inventory.products (...); +CREATE TABLE hr.employees (...); +``` + +### Schema Naming Conventions + +Use clear, descriptive schema names: +- Lowercase names +- Use underscores for multi-word names +- Avoid reserved keywords +- Use prefixes for grouped schemas (e.g., `tenant_`, `app_`) + +### Schema-Level Permissions + +Schemas enable fine-grained access control: + +```sql +-- Grant access to specific schema +GRANT USAGE ON SCHEMA analytics TO analyst_role; +GRANT SELECT ON ALL TABLES IN SCHEMA analytics TO analyst_role; + +-- Revoke access +REVOKE ALL ON SCHEMA hr FROM public; +``` + +## Integration with Other Tools + +### Combined with List Tables + +```yaml +tools: + list_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: List all schemas first + + list_tables: + type: cockroachdb-list-tables + source: my_cockroachdb + description: | + List tables in the database. + Use list_schemas first to understand schema organization. +``` + +### Schema Discovery Workflow + +1. Call `cockroachdb-list-schemas` to discover schemas +2. Call `cockroachdb-list-tables` to see tables in each schema +3. Generate queries using fully qualified names: `schema.table` + +## Common Use Cases + +### Discover Database Structure + +```yaml +tools: + discover_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: | + Discover how the database is organized into schemas. + Use this to understand the logical grouping of tables. +``` + +### Multi-Tenant Analysis + +```yaml +tools: + list_tenant_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: | + List all tenant schemas (each tenant has their own schema). 
+ Schema names follow the pattern: `tenant_<name>` +``` + +### Schema Migration Planning + +```yaml +tools: + audit_schemas: + type: cockroachdb-list-schemas + source: my_cockroachdb + description: | + Audit existing schemas before migration. + Identifies all schemas that need to be migrated. +``` + +## Error Handling + +The tool handles common errors: +- **Connection errors**: Returns connection failure details +- **Permission errors**: Returns error if user lacks USAGE privilege +- **Empty results**: Returns empty array if no user schemas exist + +## Permissions Required + +To list schemas, the user needs: +- `CONNECT` privilege on the database +- No specific schema privileges required for listing + +To query objects within schemas, the user needs: +- `USAGE` privilege on the schema +- Appropriate object privileges (SELECT, INSERT, etc.) + +## CockroachDB-Specific Features + +### System Schemas + +CockroachDB includes PostgreSQL-compatible system schemas plus CockroachDB-specific ones: + +- `crdb_internal.*`: CockroachDB internal metadata and statistics +- `pg_catalog.*`: PostgreSQL system catalog +- `information_schema.*`: SQL standard information schema + +These are automatically filtered from the results.
+ +### User-Defined Flag + +The `is_user_defined` field helps distinguish: +- `true`: User-created schemas +- `false`: System schemas (already filtered out) + +## See Also + +- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries +- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL +- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database +- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference +- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Official documentation diff --git a/docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md b/docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md new file mode 100644 index 0000000000..339dbd2320 --- /dev/null +++ b/docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md @@ -0,0 +1,344 @@ +--- +title: "cockroachdb-list-tables" +type: docs +weight: 1 +description: > + List tables in a CockroachDB database with schema details. + +--- + +## About + +The `cockroachdb-list-tables` tool retrieves a list of tables from a CockroachDB database. It provides detailed information about table structure, including columns, constraints, indexes, and foreign key relationships. 
+ +This tool is useful for: +- Database schema discovery +- Understanding table relationships +- Generating context for AI-powered database queries +- Documentation and analysis + +## Example + +```yaml +sources: + my_cockroachdb: + type: cockroachdb + host: your-cluster.cockroachlabs.cloud + port: "26257" + user: myuser + password: mypassword + database: defaultdb + queryParams: + sslmode: require + +tools: + list_all_tables: + type: cockroachdb-list-tables + source: my_cockroachdb + description: List all user tables in the database with their structure +``` + +## Configuration + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `type` | string | Must be `cockroachdb-list-tables` | +| `source` | string | Name of the CockroachDB source to use | +| `description` | string | Human-readable description for the LLM | + +### Optional Fields + +| Field | Type | Description | +|-------|------|-------------| +| `authRequired` | array | List of authentication services required | + +## Parameters + +The tool accepts optional runtime parameters: + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `table_names` | array | all tables | List of specific table names to retrieve | +| `output_format` | string | "detailed" | Output format: "simple" or "detailed" | + +## Output Formats + +### Simple Format + +Returns basic table information: +- Table name +- Row count estimate +- Size information + +```json +{ + "table_names": ["users"], + "output_format": "simple" +} +``` + +### Detailed Format (Default) + +Returns comprehensive table information: +- Table name and schema +- All columns with types and constraints +- Primary keys +- Foreign keys and relationships +- Indexes +- Check constraints +- Table size and row counts + +```json +{ + "table_names": ["users", "orders"], + "output_format": "detailed" +} +``` + +## Usage Examples + +### List All Tables + +```json +{} +``` + +### List Specific Tables 
+ +```json +{ + "table_names": ["users", "orders", "expenses"] +} +``` + +### Simple Output + +```json +{ + "output_format": "simple" +} +``` + +## Output Structure + +### Simple Format Output + +```json +{ + "table_name": "users", + "estimated_rows": 1000, + "size": "128 KB" +} +``` + +### Detailed Format Output + +```json +{ + "table_name": "users", + "schema": "public", + "columns": [ + { + "name": "id", + "type": "UUID", + "nullable": false, + "default": "gen_random_uuid()" + }, + { + "name": "email", + "type": "STRING", + "nullable": false, + "default": null + }, + { + "name": "created_at", + "type": "TIMESTAMP", + "nullable": false, + "default": "now()" + } + ], + "primary_key": ["id"], + "indexes": [ + { + "name": "users_pkey", + "columns": ["id"], + "unique": true, + "primary": true + }, + { + "name": "users_email_idx", + "columns": ["email"], + "unique": true, + "primary": false + } + ], + "foreign_keys": [], + "constraints": [ + { + "name": "users_email_check", + "type": "CHECK", + "definition": "email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Z|a-z]{2,}$'" + } + ] +} +``` + +## CockroachDB-Specific Information + +### UUID Primary Keys + +The tool recognizes CockroachDB's recommended UUID primary key pattern: + +```sql +CREATE TABLE users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + ... 
+); +``` + +### Multi-Region Tables + +For multi-region tables, the output includes locality information: + +```json +{ + "table_name": "users", + "locality": "REGIONAL BY ROW", + "regions": ["us-east-1", "us-west-2", "eu-west-1"] +} +``` + +### Interleaved Tables + +The tool shows parent-child relationships for interleaved tables (legacy feature): + +```json +{ + "table_name": "order_items", + "interleaved_in": "orders" +} +``` + +## Best Practices + +### Use for Schema Discovery + +The tool is ideal for helping AI assistants understand your database structure: + +```yaml +tools: + discover_schema: + type: cockroachdb-list-tables + source: my_cockroachdb + description: | + Use this tool first to understand the database schema before generating queries. + It shows all tables, their columns, data types, and relationships. +``` + +### Filter Large Schemas + +For databases with many tables, specify relevant tables: + +```json +{ + "table_names": ["users", "orders", "products"], + "output_format": "detailed" +} +``` + +### Use Simple Format for Overviews + +When you need just table names and sizes: + +```json +{ + "output_format": "simple" +} +``` + +## Excluded Tables + +The tool automatically excludes system tables and schemas: +- `pg_catalog.*` - PostgreSQL system catalog +- `information_schema.*` - SQL standard information schema +- `crdb_internal.*` - CockroachDB internal tables +- `pg_extension.*` - PostgreSQL extension tables + +Only user-created tables in the public schema (and other user schemas) are returned. 
+ +## Error Handling + +The tool handles common errors: +- **Table not found**: Returns empty result for non-existent tables +- **Permission errors**: Returns error if user lacks SELECT privileges +- **Connection errors**: Returns connection failure details + +## Integration with AI Assistants + +### Prompt Example + +```yaml +tools: + list_tables: + type: cockroachdb-list-tables + source: my_cockroachdb + description: | + Lists all tables in the database with detailed schema information. + Use this tool to understand: + - What tables exist + - What columns each table has + - Data types and constraints + - Relationships between tables (foreign keys) + - Available indexes + + Always call this tool before generating SQL queries to ensure + you use correct table and column names. +``` + +## Common Use Cases + +### Generate Context for Queries + +```json +{} +``` + +This provides comprehensive schema information that helps AI assistants generate accurate SQL queries. + +### Analyze Table Structure + +```json +{ + "table_names": ["users"], + "output_format": "detailed" +} +``` + +Perfect for understanding a specific table's structure, constraints, and relationships. + +### Quick Schema Overview + +```json +{ + "output_format": "simple" +} +``` + +Gets a quick list of tables with basic statistics. 
+ +## Performance Considerations + +- **Simple format** is faster for large databases +- **Detailed format** queries system tables extensively +- Specifying `table_names` reduces query time +- Results are fetched in a single query for efficiency + +## See Also + +- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries +- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL +- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas +- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference +- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Best practices diff --git a/docs/en/resources/tools/cockroachdb/cockroachdb-sql.md b/docs/en/resources/tools/cockroachdb/cockroachdb-sql.md new file mode 100644 index 0000000000..aa31edcd52 --- /dev/null +++ b/docs/en/resources/tools/cockroachdb/cockroachdb-sql.md @@ -0,0 +1,291 @@ +--- +title: "cockroachdb-sql" +type: docs +weight: 1 +description: > + Execute parameterized SQL queries in CockroachDB. + +--- + +## About + +The `cockroachdb-sql` tool allows you to execute parameterized SQL queries against a CockroachDB database. This tool supports prepared statements with parameter binding, template parameters for dynamic query construction, and automatic transaction retry for resilience against serialization conflicts. 
+ +## Example + +```yaml +sources: + my_cockroachdb: + type: cockroachdb + host: your-cluster.cockroachlabs.cloud + port: "26257" + user: myuser + password: mypassword + database: defaultdb + queryParams: + sslmode: require + +tools: + get_user_orders: + type: cockroachdb-sql + source: my_cockroachdb + description: Get all orders for a specific user + statement: | + SELECT o.id, o.order_date, o.total_amount, o.status + FROM orders o + WHERE o.user_id = $1 + ORDER BY o.order_date DESC + parameters: + - name: user_id + type: string + description: The UUID of the user +``` + +## Configuration + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `type` | string | Must be `cockroachdb-sql` | +| `source` | string | Name of the CockroachDB source to use | +| `description` | string | Human-readable description of what the tool does | +| `statement` | string | The SQL query to execute | + +### Optional Fields + +| Field | Type | Description | +|-------|------|-------------| +| `parameters` | array | List of parameter definitions for the query | +| `templateParameters` | array | List of template parameters for dynamic query construction | +| `authRequired` | array | List of authentication services required | + +## Parameters + +Parameters allow you to safely pass values into your SQL queries using prepared statements. CockroachDB uses PostgreSQL-style parameter placeholders: `$1`, `$2`, etc. 
+ +### Parameter Types + +- `string`: Text values +- `number`: Numeric values (integers or decimals) +- `boolean`: True/false values +- `array`: Array of values + +### Example with Multiple Parameters + +```yaml +tools: + filter_expenses: + type: cockroachdb-sql + source: my_cockroachdb + description: Filter expenses by category and date range + statement: | + SELECT id, description, amount, category, expense_date + FROM expenses + WHERE user_id = $1 + AND category = $2 + AND expense_date >= $3 + AND expense_date <= $4 + ORDER BY expense_date DESC + parameters: + - name: user_id + type: string + description: The user's UUID + - name: category + type: string + description: Expense category (e.g., "Food", "Transport") + - name: start_date + type: string + description: Start date in YYYY-MM-DD format + - name: end_date + type: string + description: End date in YYYY-MM-DD format +``` + +## Template Parameters + +Template parameters enable dynamic query construction by replacing placeholders in the SQL statement before parameter binding. This is useful for dynamic table names, column names, or query structure. 
+ +### Example with Template Parameters + +```yaml +tools: + get_column_data: + type: cockroachdb-sql + source: my_cockroachdb + description: Get data from a specific column + statement: | + SELECT {{column_name}} + FROM {{table_name}} + WHERE user_id = $1 + LIMIT 100 + templateParameters: + - name: table_name + type: string + description: The table to query + - name: column_name + type: string + description: The column to retrieve + parameters: + - name: user_id + type: string + description: The user's UUID +``` + +## Best Practices + +### Use UUID Primary Keys + +CockroachDB performs best with UUID primary keys to avoid transaction hotspots: + +```sql +CREATE TABLE orders ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + order_date TIMESTAMP DEFAULT now(), + total_amount DECIMAL(10,2) +); +``` + +### Use Indexes for Performance + +Create indexes on frequently queried columns: + +```sql +CREATE INDEX idx_orders_user_id ON orders(user_id); +CREATE INDEX idx_orders_date ON orders(order_date DESC); +``` + +### Use JOINs Efficiently + +CockroachDB supports standard SQL JOINs. 
Keep joins efficient by: +- Adding appropriate indexes +- Using UUIDs for foreign keys +- Limiting result sets with WHERE clauses + +```yaml +tools: + get_user_with_orders: + type: cockroachdb-sql + source: my_cockroachdb + description: Get user details with their recent orders + statement: | + SELECT u.name, u.email, o.id as order_id, o.order_date, o.total_amount + FROM users u + LEFT JOIN orders o ON u.id = o.user_id + WHERE u.id = $1 + ORDER BY o.order_date DESC + LIMIT 10 + parameters: + - name: user_id + type: string + description: The user's UUID +``` + +### Handle NULL Values + +Use COALESCE or NULL checks when dealing with nullable columns: + +```sql +SELECT id, description, COALESCE(notes, 'No notes') as notes +FROM expenses +WHERE user_id = $1 +``` + +## Error Handling + +The tool automatically handles: +- **Connection errors**: Retried with exponential backoff +- **Serialization conflicts**: Automatically retried using cockroach-go library +- **Invalid parameters**: Returns descriptive error messages +- **SQL syntax errors**: Returns database error details + +## Advanced Usage + +### Aggregations + +```yaml +tools: + expense_summary: + type: cockroachdb-sql + source: my_cockroachdb + description: Get expense summary by category for a user + statement: | + SELECT + category, + COUNT(*) as count, + SUM(amount) as total_amount, + AVG(amount) as avg_amount + FROM expenses + WHERE user_id = $1 + AND expense_date >= $2 + GROUP BY category + ORDER BY total_amount DESC + parameters: + - name: user_id + type: string + description: The user's UUID + - name: start_date + type: string + description: Start date in YYYY-MM-DD format +``` + +### Window Functions + +```yaml +tools: + running_total: + type: cockroachdb-sql + source: my_cockroachdb + description: Get running total of expenses + statement: | + SELECT + expense_date, + amount, + SUM(amount) OVER (ORDER BY expense_date) as running_total + FROM expenses + WHERE user_id = $1 + ORDER BY expense_date + 
parameters: + - name: user_id + type: string + description: The user's UUID +``` + +### Common Table Expressions (CTEs) + +```yaml +tools: + top_spenders: + type: cockroachdb-sql + source: my_cockroachdb + description: Find top spending users + statement: | + WITH user_totals AS ( + SELECT + user_id, + SUM(amount) as total_spent + FROM expenses + WHERE expense_date >= $1 + GROUP BY user_id + ) + SELECT + u.name, + u.email, + ut.total_spent + FROM user_totals ut + JOIN users u ON ut.user_id = u.id + ORDER BY ut.total_spent DESC + LIMIT 10 + parameters: + - name: start_date + type: string + description: Start date in YYYY-MM-DD format +``` + +## See Also + +- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - For ad-hoc SQL execution +- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database +- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas +- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference diff --git a/docs/en/resources/tools/couchbase/couchbase-sql.md b/docs/en/resources/tools/couchbase/couchbase-sql.md index 0f83f3cf5d..3932bf0613 100644 --- a/docs/en/resources/tools/couchbase/couchbase-sql.md +++ b/docs/en/resources/tools/couchbase/couchbase-sql.md @@ -27,37 +27,37 @@ parameters will be used according to their name: e.g. `$id`. > names, or other parts of the query. ```yaml -tools: - search_products_by_category: - kind: couchbase-sql - source: my-couchbase-instance - statement: | - SELECT p.name, p.price, p.description - FROM products p - WHERE p.category = $category AND p.price < $max_price - ORDER BY p.price DESC - LIMIT 10 - description: | - Use this tool to get a list of products for a specific category under a maximum price. - Takes a category name, e.g. "Electronics" and a maximum price e.g 500 and returns a list of product names, prices, and descriptions. - Do NOT use this tool with invalid category names. 
Do NOT guess a category name, Do NOT guess a price. - Example: - {{ - "category": "Electronics", - "max_price": 500 - }} - Example: - {{ - "category": "Furniture", - "max_price": 1000 - }} - parameters: - - name: category - type: string - description: Product category name - - name: max_price - type: integer - description: Maximum price (positive integer) +kind: tools +name: search_products_by_category +type: couchbase-sql +source: my-couchbase-instance +statement: | + SELECT p.name, p.price, p.description + FROM products p + WHERE p.category = $category AND p.price < $max_price + ORDER BY p.price DESC + LIMIT 10 +description: | + Use this tool to get a list of products for a specific category under a maximum price. + Takes a category name, e.g. "Electronics" and a maximum price e.g 500 and returns a list of product names, prices, and descriptions. + Do NOT use this tool with invalid category names. Do NOT guess a category name, Do NOT guess a price. + Example: + {{ + "category": "Electronics", + "max_price": 500 + }} + Example: + {{ + "category": "Furniture", + "max_price": 1000 + }} +parameters: + - name: category + type: string + description: Product category name + - name: max_price + type: integer + description: Maximum price (positive integer) ``` ### Example with Template Parameters @@ -69,29 +69,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: couchbase-sql - source: my-couchbase-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: couchbase-sql +source: my-couchbase-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. 
+ Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "couchbase-sql". | +| type | string | true | Must be "couchbase-sql". | | source | string | true | Name of the source the SQL query should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute | diff --git a/docs/en/resources/tools/dataform/dataform-compile-local.md b/docs/en/resources/tools/dataform/dataform-compile-local.md index eb1127772e..ba24656260 100644 --- a/docs/en/resources/tools/dataform/dataform-compile-local.md +++ b/docs/en/resources/tools/dataform/dataform-compile-local.md @@ -44,15 +44,15 @@ for more details. ## Example ```yaml -tools: - my_dataform_compiler: - kind: dataform-compile-local - description: Use this tool to compile a local Dataform project. +kind: tools +name: my_dataform_compiler +type: dataform-compile-local +description: Use this tool to compile a local Dataform project. ``` ## Reference | **field** | **type** | **required** | **description** | |:------------|:---------|:-------------|:---------------------------------------------------| -| kind | string | true | Must be "dataform-compile-local". | +| type | string | true | Must be "dataform-compile-local". | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/dataplex/dataplex-lookup-entry.md b/docs/en/resources/tools/dataplex/dataplex-lookup-entry.md index ea19ed6451..669d47efc7 100644 --- a/docs/en/resources/tools/dataplex/dataplex-lookup-entry.md +++ b/docs/en/resources/tools/dataplex/dataplex-lookup-entry.md @@ -56,17 +56,17 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -tools: - lookup_entry: - kind: dataplex-lookup-entry - source: my-dataplex-source - description: Use this tool to retrieve a specific entry in Dataplex Catalog. +kind: tools +name: lookup_entry +type: dataplex-lookup-entry +source: my-dataplex-source +description: Use this tool to retrieve a specific entry in Dataplex Catalog. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "dataplex-lookup-entry". | +| type | string | true | Must be "dataplex-lookup-entry". | | source | string | true | Name of the source the tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/dataplex/dataplex-search-aspect-types.md b/docs/en/resources/tools/dataplex/dataplex-search-aspect-types.md index 75d7793966..1bb17579ba 100644 --- a/docs/en/resources/tools/dataplex/dataplex-search-aspect-types.md +++ b/docs/en/resources/tools/dataplex/dataplex-search-aspect-types.md @@ -49,17 +49,17 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -tools: - dataplex-search-aspect-types: - kind: dataplex-search-aspect-types - source: my-dataplex-source - description: Use this tool to find aspect types relevant to the query. +kind: tools +name: dataplex-search-aspect-types +type: dataplex-search-aspect-types +source: my-dataplex-source +description: Use this tool to find aspect types relevant to the query. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "dataplex-search-aspect-types". | +| type | string | true | Must be "dataplex-search-aspect-types". | | source | string | true | Name of the source the tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/dataplex/dataplex-search-entries.md b/docs/en/resources/tools/dataplex/dataplex-search-entries.md index 75099d64e8..b7c0bd3deb 100644 --- a/docs/en/resources/tools/dataplex/dataplex-search-entries.md +++ b/docs/en/resources/tools/dataplex/dataplex-search-entries.md @@ -49,17 +49,17 @@ applying IAM permissions and roles to an identity. ## Example ```yaml -tools: - dataplex-search-entries: - kind: dataplex-search-entries - source: my-dataplex-source - description: Use this tool to get all the entries based on the provided query. +kind: tools +name: dataplex-search-entries +type: dataplex-search-entries +source: my-dataplex-source +description: Use this tool to get all the entries based on the provided query. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "dataplex-search-entries". | +| type | string | true | Must be "dataplex-search-entries". | | source | string | true | Name of the source the tool should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/dgraph/dgraph-dql.md b/docs/en/resources/tools/dgraph/dgraph-dql.md index 3c8e7685f3..ba33a35a60 100644 --- a/docs/en/resources/tools/dgraph/dgraph-dql.md +++ b/docs/en/resources/tools/dgraph/dgraph-dql.md @@ -41,83 +41,83 @@ query. 
{{< tabpane persist="header" >}} {{< tab header="Query" lang="yaml" >}} -tools: - search_user: - kind: dgraph-dql - source: my-dgraph-source - statement: | - query all($role: string){ - users(func: has(name)) @filter(eq(role, $role) AND ge(age, 30) AND le(age, 50)) { - uid - name - email - role - age - } - } - isQuery: true - timeout: 20s - description: | - Use this tool to retrieve the details of users who are admins and are between 30 and 50 years old. - The query returns the user's name, email, role, and age. - This can be helpful when you want to fetch admin users within a specific age range. - Example: Fetch admins aged between 30 and 50: - [ - { - "name": "Alice", - "role": "admin", - "age": 35 - }, - { - "name": "Bob", - "role": "admin", - "age": 45 - } - ] - parameters: - - name: $role - type: string - description: admin +kind: tools +name: search_user +type: dgraph-dql +source: my-dgraph-source +statement: | + query all($role: string){ + users(func: has(name)) @filter(eq(role, $role) AND ge(age, 30) AND le(age, 50)) { + uid + name + email + role + age + } + } +isQuery: true +timeout: 20s +description: | + Use this tool to retrieve the details of users who are admins and are between 30 and 50 years old. + The query returns the user's name, email, role, and age. + This can be helpful when you want to fetch admin users within a specific age range. + Example: Fetch admins aged between 30 and 50: + [ + { + "name": "Alice", + "role": "admin", + "age": 35 + }, + { + "name": "Bob", + "role": "admin", + "age": 45 + } + ] +parameters: + - name: $role + type: string + description: admin {{< /tab >}} {{< tab header="Mutation" lang="yaml" >}} -tools: - dgraph-manage-user-instance: - kind: dgraph-dql - source: my-dgraph-source - isQuery: false - statement: | - { - set { - _:user1 $user1 . - _:user1 $email1 . - _:user1 "admin" . - _:user1 "35" . 
+kind: tools +name: dgraph-manage-user-instance +type: dgraph-dql +source: my-dgraph-source +isQuery: false +statement: | + { + set { + _:user1 $user1 . + _:user1 $email1 . + _:user1 "admin" . + _:user1 "35" . - _:user2 $user2 . - _:user2 $email2 . - _:user2 "admin" . - _:user2 "45" . - } - } - description: | - Use this tool to insert or update user data into the Dgraph database. - The mutation adds or updates user details like name, email, role, and age. - Example: Add users Alice and Bob as admins with specific ages. - parameters: - - name: user1 - type: string - description: Alice - - name: email1 - type: string - description: alice@email.com - - name: user2 - type: string - description: Bob - - name: email2 - type: string - description: bob@email.com + _:user2 $user2 . + _:user2 $email2 . + _:user2 "admin" . + _:user2 "45" . + } + } +description: | + Use this tool to insert or update user data into the Dgraph database. + The mutation adds or updates user details like name, email, role, and age. + Example: Add users Alice and Bob as admins with specific ages. +parameters: + - name: user1 + type: string + description: Alice + - name: email1 + type: string + description: alice@email.com + - name: user2 + type: string + description: Bob + - name: email2 + type: string + description: bob@email.com {{< /tab >}} {{< /tabpane >}} @@ -126,7 +126,7 @@ tools: | **field** | **type** | **required** | **description** | |-------------|:---------------------------------------:|:------------:|-------------------------------------------------------------------------------------------| -| kind | string | true | Must be "dgraph-dql". | +| type | string | true | Must be "dgraph-dql". | | source | string | true | Name of the source the dql query should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| | statement | string | true | dql statement to execute | diff --git a/docs/en/resources/tools/elasticsearch/elasticsearch-esql.md b/docs/en/resources/tools/elasticsearch/elasticsearch-esql.md index 2df0ef6e54..1e0f5c70c1 100644 --- a/docs/en/resources/tools/elasticsearch/elasticsearch-esql.md +++ b/docs/en/resources/tools/elasticsearch/elasticsearch-esql.md @@ -20,20 +20,20 @@ for more information. ## Example ```yaml -tools: - query_my_index: - kind: elasticsearch-esql - source: elasticsearch-source - description: Use this tool to execute ES|QL queries. - query: | - FROM my-index - | KEEP * - | LIMIT ?limit - parameters: - - name: limit - type: integer - description: Limit the number of results. - required: true +kind: tools +name: query_my_index +type: elasticsearch-esql +source: elasticsearch-source +description: Use this tool to execute ES|QL queries. +query: | + FROM my-index + | KEEP * + | LIMIT ?limit +parameters: + - name: limit + type: integer + description: Limit the number of results. + required: true ``` ## Parameters diff --git a/docs/en/resources/tools/firebird/firebird-execute-sql.md b/docs/en/resources/tools/firebird/firebird-execute-sql.md index fa75d04137..830037c3cf 100644 --- a/docs/en/resources/tools/firebird/firebird-execute-sql.md +++ b/docs/en/resources/tools/firebird/firebird-execute-sql.md @@ -25,17 +25,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: firebird-execute-sql - source: my_firebird_db - description: Use this tool to execute a SQL statement against the Firebird database. +kind: tools +name: execute_sql_tool +type: firebird-execute-sql +source: my_firebird_db +description: Use this tool to execute a SQL statement against the Firebird database. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "firebird-execute-sql". 
| +| type | string | true | Must be "firebird-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/firebird/firebird-sql.md b/docs/en/resources/tools/firebird/firebird-sql.md index d07c3cb53a..3e6614b9af 100644 --- a/docs/en/resources/tools/firebird/firebird-sql.md +++ b/docs/en/resources/tools/firebird/firebird-sql.md @@ -32,68 +32,68 @@ prepared statement. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: firebird-sql - source: my_firebird_db - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: firebird-sql +source: my_firebird_db +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. 
+ Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Named Parameters ```yaml -tools: - search_flights_by_airline: - kind: firebird-sql - source: my_firebird_db - statement: | - SELECT * FROM flights - WHERE airline = :airline - AND departure_date >= :start_date - AND departure_date <= :end_date - ORDER BY departure_date - description: | - Search for flights by airline within a date range using named parameters. - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: start_date - type: string - description: Start date in YYYY-MM-DD format - - name: end_date - type: string - description: End date in YYYY-MM-DD format +kind: tools +name: search_flights_by_airline +type: firebird-sql +source: my_firebird_db +statement: | + SELECT * FROM flights + WHERE airline = :airline + AND departure_date >= :start_date + AND departure_date <= :end_date + ORDER BY departure_date +description: | + Search for flights by airline within a date range using named parameters. 
+parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: start_date + type: string + description: Start date in YYYY-MM-DD format + - name: end_date + type: string + description: End date in YYYY-MM-DD format ``` ### Example with Template Parameters @@ -105,29 +105,29 @@ tools: > [templateParameters](../#template-parameters). ```yaml -tools: - list_table: - kind: firebird-sql - source: my_firebird_db - statement: | - SELECT * FROM {{.tableName}} - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: firebird-sql +source: my_firebird_db +statement: | + SELECT * FROM {{.tableName}} +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:---------------------------------------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "firebird-sql". | +| type | string | true | Must be "firebird-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. 
| diff --git a/docs/en/resources/tools/firestore/firestore-add-documents.md b/docs/en/resources/tools/firestore/firestore-add-documents.md index 13f8b207ea..b777be7f8d 100644 --- a/docs/en/resources/tools/firestore/firestore-add-documents.md +++ b/docs/en/resources/tools/firestore/firestore-add-documents.md @@ -59,11 +59,11 @@ must be wrapped with its type indicator: ### Basic Document Creation ```yaml -tools: - add-company-doc: - kind: firestore-add-documents - source: my-firestore - description: Add a new company document +kind: tools +name: add-company-doc +type: firestore-add-documents +source: my-firestore +description: Add a new company document ``` Usage: @@ -246,14 +246,14 @@ Usage: The tool can be configured to require authentication: ```yaml -tools: - secure-add-docs: - kind: firestore-add-documents - source: prod-firestore - description: Add documents with authentication required - authRequired: - - google-oauth - - api-key +kind: tools +name: secure-add-docs +type: firestore-add-documents +source: prod-firestore +description: Add documents with authentication required +authRequired: + - google-oauth + - api-key ``` ## Error Handling diff --git a/docs/en/resources/tools/firestore/firestore-delete-documents.md b/docs/en/resources/tools/firestore/firestore-delete-documents.md index 66343231d0..36d6b61fc3 100644 --- a/docs/en/resources/tools/firestore/firestore-delete-documents.md +++ b/docs/en/resources/tools/firestore/firestore-delete-documents.md @@ -23,17 +23,17 @@ efficient batch deletion and returns the success status for each document. ## Example ```yaml -tools: - delete_user_documents: - kind: firestore-delete-documents - source: my-firestore-source - description: Use this tool to delete multiple documents from Firestore. +kind: tools +name: delete_user_documents +type: firestore-delete-documents +source: my-firestore-source +description: Use this tool to delete multiple documents from Firestore. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------------:|:------------:|----------------------------------------------------------| -| kind | string | true | Must be "firestore-delete-documents". | +| type | string | true | Must be "firestore-delete-documents". | | source | string | true | Name of the Firestore source to delete documents from. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/firestore/firestore-get-documents.md b/docs/en/resources/tools/firestore/firestore-get-documents.md index 0bc1bef1b8..a4a5b0df0c 100644 --- a/docs/en/resources/tools/firestore/firestore-get-documents.md +++ b/docs/en/resources/tools/firestore/firestore-get-documents.md @@ -23,17 +23,17 @@ such as existence status, creation time, update time, and read time. ## Example ```yaml -tools: - get_user_documents: - kind: firestore-get-documents - source: my-firestore-source - description: Use this tool to retrieve multiple documents from Firestore. +kind: tools +name: get_user_documents +type: firestore-get-documents +source: my-firestore-source +description: Use this tool to retrieve multiple documents from Firestore. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------------:|:------------:|------------------------------------------------------------| -| kind | string | true | Must be "firestore-get-documents". | +| type | string | true | Must be "firestore-get-documents". | | source | string | true | Name of the Firestore source to retrieve documents from. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/firestore/firestore-get-rules.md b/docs/en/resources/tools/firestore/firestore-get-rules.md index 568dc6b1da..28b48e53ab 100644 --- a/docs/en/resources/tools/firestore/firestore-get-rules.md +++ b/docs/en/resources/tools/firestore/firestore-get-rules.md @@ -23,17 +23,17 @@ content along with metadata such as the ruleset name, and timestamps. ## Example ```yaml -tools: - get_firestore_rules: - kind: firestore-get-rules - source: my-firestore-source - description: Use this tool to retrieve the active Firestore security rules. +kind: tools +name: get_firestore_rules +type: firestore-get-rules +source: my-firestore-source +description: Use this tool to retrieve the active Firestore security rules. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:-------------:|:------------:|-------------------------------------------------------| -| kind | string | true | Must be "firestore-get-rules". | +| type | string | true | Must be "firestore-get-rules". | | source | string | true | Name of the Firestore source to retrieve rules from. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/firestore/firestore-list-collections.md b/docs/en/resources/tools/firestore/firestore-list-collections.md index b2cc7ef6b6..971aed02b8 100644 --- a/docs/en/resources/tools/firestore/firestore-list-collections.md +++ b/docs/en/resources/tools/firestore/firestore-list-collections.md @@ -26,17 +26,17 @@ not provided, it lists all root-level collections in the database. ## Example ```yaml -tools: - list_firestore_collections: - kind: firestore-list-collections - source: my-firestore-source - description: Use this tool to list collections in Firestore. +kind: tools +name: list_firestore_collections +type: firestore-list-collections +source: my-firestore-source +description: Use this tool to list collections in Firestore. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:----------------:|:------------:|--------------------------------------------------------| -| kind | string | true | Must be "firestore-list-collections". | +| type | string | true | Must be "firestore-list-collections". | | source | string | true | Name of the Firestore source to list collections from. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/firestore/firestore-query-collection.md b/docs/en/resources/tools/firestore/firestore-query-collection.md index f615a9bc2e..774aa9f29e 100644 --- a/docs/en/resources/tools/firestore/firestore-query-collection.md +++ b/docs/en/resources/tools/firestore/firestore-query-collection.md @@ -18,17 +18,17 @@ with filters, ordering, and limit capabilities. To use this tool, you need to configure it in your YAML configuration file: ```yaml -sources: - my-firestore: - kind: firestore - project: my-gcp-project - database: "(default)" - -tools: - query_collection: - kind: firestore-query-collection - source: my-firestore - description: Query Firestore collections with advanced filtering +kind: sources +name: my-firestore +type: firestore +project: my-gcp-project +database: "(default)" +--- +kind: tools +name: query_collection +type: firestore-query-collection +source: my-firestore +description: Query Firestore collections with advanced filtering ``` ## Parameters diff --git a/docs/en/resources/tools/firestore/firestore-query.md b/docs/en/resources/tools/firestore/firestore-query.md index 20fe9c8fd7..a3027e02d2 100644 --- a/docs/en/resources/tools/firestore/firestore-query.md +++ b/docs/en/resources/tools/firestore/firestore-query.md @@ -38,87 +38,87 @@ developers can use to create custom tools with specific query patterns. 
### Basic Configuration ```yaml -tools: - query_countries: - kind: firestore-query - source: my-firestore-source - description: Query countries with dynamic filters - collectionPath: "countries" - filters: | - { - "field": "continent", - "op": "==", - "value": {"stringValue": "{{.continent}}"} - } - parameters: - - name: continent - type: string - description: Continent to filter by - required: true +kind: tools +name: query_countries +type: firestore-query +source: my-firestore-source +description: Query countries with dynamic filters +collectionPath: "countries" +filters: | + { + "field": "continent", + "op": "==", + "value": {"stringValue": "{{.continent}}"} + } +parameters: + - name: continent + type: string + description: Continent to filter by + required: true ``` ### Advanced Configuration with Complex Filters ```yaml -tools: - advanced_query: - kind: firestore-query - source: my-firestore-source - description: Advanced query with complex filters - collectionPath: "{{.collection}}" - filters: | +kind: tools +name: advanced_query +type: firestore-query +source: my-firestore-source +description: Advanced query with complex filters +collectionPath: "{{.collection}}" +filters: | + { + "or": [ + {"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}}, { - "or": [ - {"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}}, - { - "and": [ - {"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}}, - {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}}, - {"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}} - ] - } + "and": [ + {"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}}, + {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}}, + {"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}} ] } - select: - - name - - status - - priority - orderBy: - field: "{{.sortField}}" - direction: "{{.sortDirection}}" - limit: 100 
- analyzeQuery: true - parameters: - - name: collection - type: string - description: Collection to query - required: true - - name: status - type: string - description: Status to filter by - required: true - - name: priority - type: string - description: Minimum priority value - required: true - - name: maxArea - type: float - description: Maximum area value - required: true - - name: isActive - type: boolean - description: Filter by active status - required: true - - name: sortField - type: string - description: Field to sort by - required: false - default: "createdAt" - - name: sortDirection - type: string - description: Sort direction (ASCENDING or DESCENDING) - required: false - default: "DESCENDING" + ] + } +select: + - name + - status + - priority +orderBy: + field: "{{.sortField}}" + direction: "{{.sortDirection}}" +limit: 100 +analyzeQuery: true +parameters: + - name: collection + type: string + description: Collection to query + required: true + - name: status + type: string + description: Status to filter by + required: true + - name: priority + type: string + description: Minimum priority value + required: true + - name: maxArea + type: float + description: Maximum area value + required: true + - name: isActive + type: boolean + description: Filter by active status + required: true + - name: sortField + type: string + description: Field to sort by + required: false + default: "createdAt" + - name: sortDirection + type: string + description: Sort direction (ASCENDING or DESCENDING) + required: false + default: "DESCENDING" ``` ## Parameters @@ -127,7 +127,7 @@ tools: | Parameter | Type | Required | Description | |------------------|---------|----------|-------------------------------------------------------------------------------------------------------------| -| `kind` | string | Yes | Must be `firestore-query` | +| `type` | string | Yes | Must be `firestore-query` | | `source` | string | Yes | Name of the Firestore source to use | | `description` | 
string | Yes | Description of what this tool does | | `collectionPath` | string | Yes | Path to the collection to query (supports templates) | @@ -254,103 +254,103 @@ The tool supports all Firestore native JSON value types: ### Example 1: Query with Dynamic Collection Path ```yaml -tools: - user_documents: - kind: firestore-query - source: my-firestore - description: Query user-specific documents - collectionPath: "users/{{.userId}}/documents" - filters: | - { - "field": "type", - "op": "==", - "value": {"stringValue": "{{.docType}}"} - } - parameters: - - name: userId - type: string - description: User ID - required: true - - name: docType - type: string - description: Document type to filter - required: true +kind: tools +name: user_documents +type: firestore-query +source: my-firestore +description: Query user-specific documents +collectionPath: "users/{{.userId}}/documents" +filters: | + { + "field": "type", + "op": "==", + "value": {"stringValue": "{{.docType}}"} + } +parameters: + - name: userId + type: string + description: User ID + required: true + - name: docType + type: string + description: Document type to filter + required: true ``` ### Example 2: Complex Geographic Query ```yaml -tools: - location_search: - kind: firestore-query - source: my-firestore - description: Search locations by area and population - collectionPath: "cities" - filters: | - { - "and": [ - {"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}}, - {"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}}, - {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}} - ] - } - orderBy: - field: "population" - direction: "DESCENDING" - limit: 50 - parameters: - - name: country - type: string - description: Country code - required: true - - name: minPopulation - type: string - description: Minimum population (as string for large numbers) - required: true - - name: maxArea - type: float - description: Maximum area in square 
kilometers - required: true +kind: tools +name: location_search +type: firestore-query +source: my-firestore +description: Search locations by area and population +collectionPath: "cities" +filters: | + { + "and": [ + {"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}}, + {"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}}, + {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}} + ] + } +orderBy: + field: "population" + direction: "DESCENDING" +limit: 50 +parameters: + - name: country + type: string + description: Country code + required: true + - name: minPopulation + type: string + description: Minimum population (as string for large numbers) + required: true + - name: maxArea + type: float + description: Maximum area in square kilometers + required: true ``` ### Example 3: Time-based Query with Analysis ```yaml -tools: - activity_log: - kind: firestore-query - source: my-firestore - description: Query activity logs within time range - collectionPath: "logs" - filters: | - { - "and": [ - {"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}}, - {"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}}, - {"field": "severity", "op": "in", "value": {"arrayValue": {"values": [ - {"stringValue": "ERROR"}, - {"stringValue": "CRITICAL"} - ]}}} - ] - } - select: - - timestamp - - message - - severity - - userId - orderBy: - field: "timestamp" - direction: "DESCENDING" - analyzeQuery: true - parameters: - - name: startTime - type: string - description: Start time in RFC3339 format - required: true - - name: endTime - type: string - description: End time in RFC3339 format - required: true +kind: tools +name: activity_log +type: firestore-query +source: my-firestore +description: Query activity logs within time range +collectionPath: "logs" +filters: | + { + "and": [ + {"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}}, + 
{"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}}, + {"field": "severity", "op": "in", "value": {"arrayValue": {"values": [ + {"stringValue": "ERROR"}, + {"stringValue": "CRITICAL"} + ]}}} + ] + } +select: + - timestamp + - message + - severity + - userId +orderBy: + field: "timestamp" + direction: "DESCENDING" +analyzeQuery: true +parameters: + - name: startTime + type: string + description: Start time in RFC3339 format + required: true + - name: endTime + type: string + description: End time in RFC3339 format + required: true ``` ## Usage diff --git a/docs/en/resources/tools/firestore/firestore-update-document.md b/docs/en/resources/tools/firestore/firestore-update-document.md index 56aad8d371..0cbda2cbfa 100644 --- a/docs/en/resources/tools/firestore/firestore-update-document.md +++ b/docs/en/resources/tools/firestore/firestore-update-document.md @@ -76,7 +76,7 @@ deleted. To delete a field, include it in the `updateMask` but omit it from | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "firestore-update-document". | +| type | string | true | Must be "firestore-update-document". | | source | string | true | Name of the Firestore source to update documents in. | | description | string | true | Description of the tool that is passed to the LLM. | @@ -85,11 +85,11 @@ deleted. 
To delete a field, include it in the `updateMask` but omit it from ### Basic Document Update (Full Merge) ```yaml -tools: - update-user-doc: - kind: firestore-update-document - source: my-firestore - description: Update a user document +kind: tools +name: update-user-doc +type: firestore-update-document +source: my-firestore +description: Update a user document ``` Usage: @@ -299,14 +299,14 @@ In this example: The tool can be configured to require authentication: ```yaml -tools: - secure-update-doc: - kind: firestore-update-document - source: prod-firestore - description: Update documents with authentication required - authRequired: - - google-oauth - - api-key +kind: tools +name: secure-update-doc +type: firestore-update-document +source: prod-firestore +description: Update documents with authentication required +authRequired: + - google-oauth + - api-key ``` ## Error Handling diff --git a/docs/en/resources/tools/firestore/firestore-validate-rules.md b/docs/en/resources/tools/firestore/firestore-validate-rules.md index 6f74bec527..3e6b069521 100644 --- a/docs/en/resources/tools/firestore/firestore-validate-rules.md +++ b/docs/en/resources/tools/firestore/firestore-validate-rules.md @@ -17,11 +17,11 @@ reporting with source positions and code snippets. 
## Configuration ```yaml -tools: - firestore-validate-rules: - kind: firestore-validate-rules - source: - description: "Checks the provided Firestore Rules source for syntax and validation errors" +kind: tools +name: firestore-validate-rules +type: firestore-validate-rules +source: +description: "Checks the provided Firestore Rules source for syntax and validation errors" ``` ## Authentication diff --git a/docs/en/resources/tools/http/http.md b/docs/en/resources/tools/http/http.md index 466a86c36d..19272c9035 100644 --- a/docs/en/resources/tools/http/http.md +++ b/docs/en/resources/tools/http/http.md @@ -31,36 +31,38 @@ For example, the following config allows you to reach different paths of the same server using multiple Tools: ```yaml -sources: - my-http-source: - kind: http - baseUrl: https://api.example.com - -tools: - my-post-tool: - kind: http - source: my-http-source - method: POST - path: /update - description: Tool to update information to the example API - - my-get-tool: - kind: http - source: my-http-source - method: GET - path: /search - description: Tool to search information from the example API - - my-dynamic-path-tool: - kind: http - source: my-http-source - method: GET - path: /{{.myPathParam}}/search - description: Tool to reach endpoint based on the input to `myPathParam` - pathParams: - - name: myPathParam - type: string - description: The dynamic path parameter +kind: sources +name: my-http-source +type: http +baseUrl: https://api.example.com +--- +kind: tools +name: my-post-tool +type: http +source: my-http-source +method: POST +path: /update +description: Tool to update information to the example API +--- +kind: tools +name: my-get-tool +type: http +source: my-http-source +method: GET +path: /search +description: Tool to search information from the example API +--- +kind: tools +name: my-dynamic-path-tool +type: http +source: my-http-source +method: GET +path: /{{.myPathParam}}/search +description: Tool to reach endpoint based on the input to 
`myPathParam` +pathParams: + - name: myPathParam + type: string + description: The dynamic path parameter ``` @@ -77,15 +79,16 @@ The HTTP Tool allows you to specify headers in two different ways: same for every invocation: ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search - description: Tool to search data from API - headers: - Authorization: API_KEY - Content-Type: application/json +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search +description: Tool to search data from API +headers: + Authorization: API_KEY + Content-Type: application/json ``` - Dynamic headers can be specified as parameters in the `headerParams` field. @@ -93,16 +96,17 @@ my-http-tool: is determined by the LLM input upon Tool invocation: ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search - description: some description - headerParams: - - name: Content-Type # Example LLM input: "application/json" - description: request content type - type: string +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search +description: some description +headerParams: + - name: Content-Type # Example LLM input: "application/json" + description: request content type + type: string ``` ### Query parameters @@ -115,28 +119,30 @@ filtering or sorting data. 
the URL itself: ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search?language=en&id=1 - description: Tool to search for item with ID 1 in English +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search?language=en&id=1 +description: Tool to search for item with ID 1 in English ``` - Dynamic request query parameters should be specified as parameters in the `queryParams` section: ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search - description: Tool to search for item with ID - queryParams: - - name: id - description: item ID - type: integer +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search +description: Tool to search for item with ID +queryParams: + - name: id + description: item ID + type: integer ``` ### Request body @@ -150,24 +156,25 @@ body payload upon Tool invocation. Example: ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search - description: Tool to search for person with name and age - requestBody: | - { - "age": {{.age}}, - "name": "{{.name}}" - } - bodyParams: - - name: age - description: age number - type: integer - - name: name - description: name string - type: string +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search +description: Tool to search for person with name and age +requestBody: | + { + "age": {{.age}}, + "name": "{{.name}}" + } +bodyParams: + - name: age + description: age number + type: integer + - name: name + description: name string + type: string ``` #### Formatting Parameters @@ -211,45 +218,46 @@ will send the following output: ## Example ```yaml -my-http-tool: - kind: http - source: my-http-source - method: GET - path: /search +kind: tools +name: my-http-tool +type: http +source: my-http-source +method: GET +path: /search +description: some description +authRequired: + - 
my-google-auth-service + - other-auth-service +queryParams: + - name: country description: some description - authRequired: - - my-google-auth-service - - other-auth-service - queryParams: - - name: country - description: some description - type: string - requestBody: | - { - "age": {{.age}}, - "city": "{{.city}}" - } - bodyParams: - - name: age - description: age number - type: integer - - name: city - description: city string - type: string - headers: - Authorization: API_KEY - Content-Type: application/json - headerParams: - - name: Language - description: language string - type: string + type: string +requestBody: | + { + "age": {{.age}}, + "city": "{{.city}}" + } +bodyParams: + - name: age + description: age number + type: integer + - name: city + description: city string + type: string +headers: + Authorization: API_KEY + Content-Type: application/json +headerParams: + - name: Language + description: language string + type: string ``` ## Reference | **field** | **type** | **required** | **description** | |--------------|:---------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "http". | +| type | string | true | Must be "http". | | source | string | true | Name of the source the HTTP request should be sent to. | | description | string | true | Description of the tool that is passed to the LLM. | | path | string | true | The path of the HTTP request. You can include static query parameters in the path string. 
| diff --git a/docs/en/resources/tools/looker/looker-add-dashboard-element.md b/docs/en/resources/tools/looker/looker-add-dashboard-element.md index 3c0a65f2d3..5e217fc865 100644 --- a/docs/en/resources/tools/looker/looker-add-dashboard-element.md +++ b/docs/en/resources/tools/looker/looker-add-dashboard-element.md @@ -25,42 +25,42 @@ It's compatible with the following sources: ## Example ```yaml -tools: - add_dashboard_element: - kind: looker-add-dashboard-element - source: looker-source - description: | - This tool creates a new tile (element) within an existing Looker dashboard. - Tiles are added in the order this tool is called for a given `dashboard_id`. +kind: tools +name: add_dashboard_element +type: looker-add-dashboard-element +source: looker-source +description: | + This tool creates a new tile (element) within an existing Looker dashboard. + Tiles are added in the order this tool is called for a given `dashboard_id`. - CRITICAL ORDER OF OPERATIONS: - 1. Create the dashboard using `make_dashboard`. - 2. Add any dashboard-level filters using `add_dashboard_filter`. - 3. Then, add elements (tiles) using this tool. + CRITICAL ORDER OF OPERATIONS: + 1. Create the dashboard using `make_dashboard`. + 2. Add any dashboard-level filters using `add_dashboard_filter`. + 3. Then, add elements (tiles) using this tool. - Required Parameters: - - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`. - - model_name, explore_name, fields: These query parameters are inherited - from the `query` tool and are required to define the data for the tile. + Required Parameters: + - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`. + - model_name, explore_name, fields: These query parameters are inherited + from the `query` tool and are required to define the data for the tile. - Optional Parameters: - - title: An optional title for the dashboard tile. 
- - pivots, filters, sorts, limit, query_timezone: These query parameters are - inherited from the `query` tool and can be used to customize the tile's query. - - vis_config: A JSON object defining the visualization settings for this tile. - The structure and options are the same as for the `query_url` tool's `vis_config`. + Optional Parameters: + - title: An optional title for the dashboard tile. + - pivots, filters, sorts, limit, query_timezone: These query parameters are + inherited from the `query` tool and can be used to customize the tile's query. + - vis_config: A JSON object defining the visualization settings for this tile. + The structure and options are the same as for the `query_url` tool's `vis_config`. - Connecting to Dashboard Filters: - A dashboard element can be connected to one or more dashboard filters (created with - `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter - and the `field` from the element's query that the filter should apply to. - The format for specifying the field is `view_name.field_name`. + Connecting to Dashboard Filters: + A dashboard element can be connected to one or more dashboard filters (created with + `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter + and the `field` from the element's query that the filter should apply to. + The format for specifying the field is `view_name.field_name`. ``` ## Reference | **field** | **type** | **required** | **description** | |:------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-add-dashboard-element". | +| type | string | true | Must be "looker-add-dashboard-element". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| \ No newline at end of file diff --git a/docs/en/resources/tools/looker/looker-add-dashboard-filter.md b/docs/en/resources/tools/looker/looker-add-dashboard-filter.md index e5cf5ba34d..ca88afebfc 100644 --- a/docs/en/resources/tools/looker/looker-add-dashboard-filter.md +++ b/docs/en/resources/tools/looker/looker-add-dashboard-filter.md @@ -39,37 +39,37 @@ It's compatible with the following sources: ## Example ```yaml -tools: - add_dashboard_filter: - kind: looker-add-dashboard-filter - source: looker-source - description: | - This tool adds a filter to a Looker dashboard. +kind: tools +name: add_dashboard_filter +type: looker-add-dashboard-filter +source: looker-source +description: | + This tool adds a filter to a Looker dashboard. - CRITICAL ORDER OF OPERATIONS: - 1. Create a dashboard using `make_dashboard`. - 2. Add all desired filters using this tool (`add_dashboard_filter`). - 3. Finally, add dashboard elements (tiles) using `add_dashboard_element`. + CRITICAL ORDER OF OPERATIONS: + 1. Create a dashboard using `make_dashboard`. + 2. Add all desired filters using this tool (`add_dashboard_filter`). + 3. Finally, add dashboard elements (tiles) using `add_dashboard_element`. - Parameters: - - dashboard_id (required): The ID from `make_dashboard`. - - name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter. - - title (required): The label displayed to users in the UI. - - filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`. - - default_value (optional): The initial value for the filter. + Parameters: + - dashboard_id (required): The ID from `make_dashboard`. + - name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter. + - title (required): The label displayed to users in the UI. 
+ - filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`. + - default_value (optional): The initial value for the filter. - Field Filters (`flter_type: field_filter`): - If creating a field filter, you must also provide: - - model - - explore - - dimension - The filter will inherit suggestions and type information from this LookML field. + Field Filters (`filter_type: field_filter`): + If creating a field filter, you must also provide: + - model + - explore + - dimension + The filter will inherit suggestions and type information from this LookML field. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-add-dashboard-filter". | +| type | string | true | Must be "looker-add-dashboard-filter". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | \ No newline at end of file diff --git a/docs/en/resources/tools/looker/looker-conversational-analytics.md b/docs/en/resources/tools/looker/looker-conversational-analytics.md index 150f347cf7..6e064cf7a4 100644 --- a/docs/en/resources/tools/looker/looker-conversational-analytics.md +++ b/docs/en/resources/tools/looker/looker-conversational-analytics.md @@ -29,21 +29,21 @@ It's compatible with the following sources: ## Example ```yaml -tools: - ask_data_insights: - kind: looker-conversational-analytics - source: looker-source - description: | - Use this tool to ask questions about your data using the Looker Conversational - Analytics API. You must provide a natural language query and a list of - 1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]). - Use the 'get_models' and 'get_explores' tools to discover available models and explores.
+kind: tools +name: ask_data_insights +type: looker-conversational-analytics +source: looker-source +description: | + Use this tool to ask questions about your data using the Looker Conversational + Analytics API. You must provide a natural language query and a list of + 1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]). + Use the 'get_models' and 'get_explores' tools to discover available models and explores. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "lookerca-conversational-analytics". | +| type | string | true | Must be "looker-conversational-analytics". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-create-project-file.md b/docs/en/resources/tools/looker/looker-create-project-file.md index 826dda98e9..3e36fe73d0 100644 --- a/docs/en/resources/tools/looker/looker-create-project-file.md +++ b/docs/en/resources/tools/looker/looker-create-project-file.md @@ -22,29 +22,29 @@ as well as the file content. ## Example ```yaml -tools: - create_project_file: - kind: looker-create-project-file - source: looker-source - description: | - This tool creates a new LookML file within a specified project, populating - it with the provided content. +kind: tools +name: create_project_file +type: looker-create-project-file +source: looker-source +description: | + This tool creates a new LookML file within a specified project, populating + it with the provided content. - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
- Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The desired path and filename for the new file within the project. - - content (required): The full LookML content to write into the new file. + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The desired path and filename for the new file within the project. + - content (required): The full LookML content to write into the new file. - Output: - A confirmation message upon successful file creation. + Output: + A confirmation message upon successful file creation. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-create-project-file". | +| type | string | true | Must be "looker-create-project-file". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-delete-project-file.md b/docs/en/resources/tools/looker/looker-delete-project-file.md index e5bf06948d..11419b8a19 100644 --- a/docs/en/resources/tools/looker/looker-delete-project-file.md +++ b/docs/en/resources/tools/looker/looker-delete-project-file.md @@ -21,28 +21,28 @@ It's compatible with the following sources: ## Example ```yaml -tools: - delete_project_file: - kind: looker-delete-project-file - source: looker-source - description: | - This tool permanently deletes a specified LookML file from within a project. - Use with caution, as this action cannot be undone through the API. +kind: tools +name: delete_project_file +type: looker-delete-project-file +source: looker-source +description: | + This tool permanently deletes a specified LookML file from within a project. + Use with caution, as this action cannot be undone through the API. 
- Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The exact path to the LookML file to delete within the project. + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The exact path to the LookML file to delete within the project. - Output: - A confirmation message upon successful file deletion. + Output: + A confirmation message upon successful file deletion. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-delete-project-file". | +| type | string | true | Must be "looker-delete-project-file". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-dev-mode.md b/docs/en/resources/tools/looker/looker-dev-mode.md index 9f69343ad5..33509ea54b 100644 --- a/docs/en/resources/tools/looker/looker-dev-mode.md +++ b/docs/en/resources/tools/looker/looker-dev-mode.md @@ -22,24 +22,24 @@ to exit dev mode. ## Example ```yaml -tools: - dev_mode: - kind: looker-dev-mode - source: looker-source - description: | - This tool allows toggling the Looker IDE session between Development Mode and Production Mode. - Development Mode enables making and testing changes to LookML projects. +kind: tools +name: dev_mode +type: looker-dev-mode +source: looker-source +description: | + This tool allows toggling the Looker IDE session between Development Mode and Production Mode. + Development Mode enables making and testing changes to LookML projects. - Parameters: - - enable (required): A boolean value. 
- - `true`: Switches the current session to Development Mode. - - `false`: Switches the current session to Production Mode. + Parameters: + - enable (required): A boolean value. + - `true`: Switches the current session to Development Mode. + - `false`: Switches the current session to Production Mode. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-dev-mode". | +| type | string | true | Must be "looker-dev-mode". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-generate-embed-url.md b/docs/en/resources/tools/looker/looker-generate-embed-url.md index 1e136165da..bf7f4c1f23 100644 --- a/docs/en/resources/tools/looker/looker-generate-embed-url.md +++ b/docs/en/resources/tools/looker/looker-generate-embed-url.md @@ -31,28 +31,28 @@ supplied to this tool. ## Example ```yaml -tools: - generate_embed_url: - kind: looker-generate-embed-url - source: looker-source - description: | - This tool generates a signed, private embed URL for specific Looker content, - allowing users to access it directly. +kind: tools +name: generate_embed_url +type: looker-generate-embed-url +source: looker-source +description: | + This tool generates a signed, private embed URL for specific Looker content, + allowing users to access it directly. - Parameters: - - type (required): The type of content to embed. Common values include: - - `dashboards` - - `looks` - - `explore` - - id (required): The unique identifier for the content. - - For dashboards and looks, use the numeric ID (e.g., "123"). - - For explores, use the format "model_name/explore_name". + Parameters: + - type (required): The type of content to embed. 
Common values include: + - `dashboards` + - `looks` + - `explore` + - id (required): The unique identifier for the content. + - For dashboards and looks, use the numeric ID (e.g., "123"). + - For explores, use the format "model_name/explore_name". ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-generate-embed-url" | +| type | string | true | Must be "looker-generate-embed-url" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-connection-databases.md b/docs/en/resources/tools/looker/looker-get-connection-databases.md index 23611fc2a1..b46459c26e 100644 --- a/docs/en/resources/tools/looker/looker-get-connection-databases.md +++ b/docs/en/resources/tools/looker/looker-get-connection-databases.md @@ -21,27 +21,27 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_connection_databases: - kind: looker-get-connection-databases - source: looker-source - description: | - This tool retrieves a list of databases available through a specified Looker connection. - This is only applicable for connections that support multiple databases. - Use `get_connections` to check if a connection supports multiple databases. +kind: tools +name: get_connection_databases +type: looker-get-connection-databases +source: looker-source +description: | + This tool retrieves a list of databases available through a specified Looker connection. + This is only applicable for connections that support multiple databases. + Use `get_connections` to check if a connection supports multiple databases. - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. 
+ Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. - Output: - A JSON array of strings, where each string is the name of an available database. - If the connection does not support multiple databases, an empty list or an error will be returned. + Output: + A JSON array of strings, where each string is the name of an available database. + If the connection does not support multiple databases, an empty list or an error will be returned. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-connection-databases". | +| type | string | true | Must be "looker-get-connection-databases". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-connection-schemas.md b/docs/en/resources/tools/looker/looker-get-connection-schemas.md index 0ef34015c3..de568b8e0e 100644 --- a/docs/en/resources/tools/looker/looker-get-connection-schemas.md +++ b/docs/en/resources/tools/looker/looker-get-connection-schemas.md @@ -21,27 +21,27 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_connection_schemas: - kind: looker-get-connection-schemas - source: looker-source - description: | - This tool retrieves a list of database schemas available through a specified - Looker connection. +kind: tools +name: get_connection_schemas +type: looker-get-connection-schemas +source: looker-source +description: | + This tool retrieves a list of database schemas available through a specified + Looker connection. - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - database (optional): An optional database name to filter the schemas. 
- Only applicable for connections that support multiple databases. + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - database (optional): An optional database name to filter the schemas. + Only applicable for connections that support multiple databases. - Output: - A JSON array of strings, where each string is the name of an available schema. + Output: + A JSON array of strings, where each string is the name of an available schema. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-connection-schemas". | +| type | string | true | Must be "looker-get-connection-schemas". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-connection-table-columns.md b/docs/en/resources/tools/looker/looker-get-connection-table-columns.md index f4db6445fe..8f19bd372b 100644 --- a/docs/en/resources/tools/looker/looker-get-connection-table-columns.md +++ b/docs/en/resources/tools/looker/looker-get-connection-table-columns.md @@ -21,31 +21,31 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_connection_table_columns: - kind: looker-get-connection-table-columns - source: looker-source - description: | - This tool retrieves a list of columns for one or more specified tables within a - given database schema and connection. +kind: tools +name: get_connection_table_columns +type: looker-get-connection-table-columns +source: looker-source +description: | + This tool retrieves a list of columns for one or more specified tables within a + given database schema and connection. 
- Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`. - - tables (required): A comma-separated string of table names for which to retrieve columns - (e.g., "users,orders,products"), obtained from `get_connection_tables`. - - database (optional): The name of the database to filter by. Only applicable for connections - that support multiple databases (check with `get_connections`). + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`. + - tables (required): A comma-separated string of table names for which to retrieve columns + (e.g., "users,orders,products"), obtained from `get_connection_tables`. + - database (optional): The name of the database to filter by. Only applicable for connections + that support multiple databases (check with `get_connections`). - Output: - A JSON array of objects, where each object represents a column and contains details - such as `table_name`, `column_name`, `data_type`, and `is_nullable`. + Output: + A JSON array of objects, where each object represents a column and contains details + such as `table_name`, `column_name`, `data_type`, and `is_nullable`. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-connection-table-columns". | +| type | string | true | Must be "looker-get-connection-table-columns". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/looker/looker-get-connection-tables.md b/docs/en/resources/tools/looker/looker-get-connection-tables.md index 86a2830cd9..5d191828bf 100644 --- a/docs/en/resources/tools/looker/looker-get-connection-tables.md +++ b/docs/en/resources/tools/looker/looker-get-connection-tables.md @@ -22,28 +22,28 @@ and an optional `db` parameter. ## Example ```yaml -tools: - get_connection_tables: - kind: looker-get-connection-tables - source: looker-source - description: | - This tool retrieves a list of tables available within a specified database schema - through a Looker connection. +kind: tools +name: get_connection_tables +type: looker-get-connection-tables +source: looker-source +description: | + This tool retrieves a list of tables available within a specified database schema + through a Looker connection. - Parameters: - - connection_name (required): The name of the database connection, obtained from `get_connections`. - - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`. - - database (optional): The name of the database to filter by. Only applicable for connections - that support multiple databases (check with `get_connections`). + Parameters: + - connection_name (required): The name of the database connection, obtained from `get_connections`. + - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`. + - database (optional): The name of the database to filter by. Only applicable for connections + that support multiple databases (check with `get_connections`). - Output: - A JSON array of strings, where each string is the name of an available table. + Output: + A JSON array of strings, where each string is the name of an available table. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-connection-tables". | +| type | string | true | Must be "looker-get-connection-tables". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-connections.md b/docs/en/resources/tools/looker/looker-get-connections.md index c6c0159789..52a1c96780 100644 --- a/docs/en/resources/tools/looker/looker-get-connections.md +++ b/docs/en/resources/tools/looker/looker-get-connections.md @@ -21,29 +21,29 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_connections: - kind: looker-get-connections - source: looker-source - description: | - This tool retrieves a list of all database connections configured in the Looker system. +kind: tools +name: get_connections +type: looker-get-connections +source: looker-source +description: | + This tool retrieves a list of all database connections configured in the Looker system. - Parameters: - This tool takes no parameters. + Parameters: + This tool takes no parameters. - Output: - A JSON array of objects, each representing a database connection and including details such as: - - `name`: The connection's unique identifier. - - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery"). - - `default_schema`: The default schema for the connection. - - `database`: The associated database name (if applicable). - - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases. + Output: + A JSON array of objects, each representing a database connection and including details such as: + - `name`: The connection's unique identifier. 
+ - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery"). + - `default_schema`: The default schema for the connection. + - `database`: The associated database name (if applicable). + - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-connections". | +| type | string | true | Must be "looker-get-connections". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-dashboards.md b/docs/en/resources/tools/looker/looker-get-dashboards.md index db5c9e532e..a8f117d278 100644 --- a/docs/en/resources/tools/looker/looker-get-dashboards.md +++ b/docs/en/resources/tools/looker/looker-get-dashboards.md @@ -28,36 +28,36 @@ default to 100 and 0. ## Example ```yaml -tools: - get_dashboards: - kind: looker-get-dashboards - source: looker-source - description: | - This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard. - - Search Parameters: - - title (optional): Filter by dashboard title (supports wildcards). - - folder_id (optional): Filter by the ID of the folder where the dashboard is saved. - - user_id (optional): Filter by the ID of the user who created the dashboard. - - description (optional): Filter by description content (supports wildcards). - - id (optional): Filter by specific dashboard ID. - - limit (optional): Maximum number of results to return. Defaults to a system limit. - - offset (optional): Starting point for pagination. - - String Search Behavior: - - Case-insensitive matching. 
- - Supports SQL LIKE pattern match wildcards: - - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance") - - `_`: Matches any single character. (e.g., `"s_les"` matches "sales") - - Special expressions for null checks: - - `"IS NULL"`: Matches dashboards where the field is null. - - `"NOT NULL"`: Excludes dashboards where the field is null. +kind: tools +name: get_dashboards +type: looker-get-dashboards +source: looker-source +description: | + This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard. + + Search Parameters: + - title (optional): Filter by dashboard title (supports wildcards). + - folder_id (optional): Filter by the ID of the folder where the dashboard is saved. + - user_id (optional): Filter by the ID of the user who created the dashboard. + - description (optional): Filter by description content (supports wildcards). + - id (optional): Filter by specific dashboard ID. + - limit (optional): Maximum number of results to return. Defaults to a system limit. + - offset (optional): Starting point for pagination. + + String Search Behavior: + - Case-insensitive matching. + - Supports SQL LIKE pattern match wildcards: + - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance") + - `_`: Matches any single character. (e.g., `"s_les"` matches "sales") + - Special expressions for null checks: + - `"IS NULL"`: Matches dashboards where the field is null. + - `"NOT NULL"`: Excludes dashboards where the field is null. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-dashboards" | +| type | string | true | Must be "looker-get-dashboards" | | source | string | true | Name of the source the SQL should execute on. 
| | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-dimensions.md b/docs/en/resources/tools/looker/looker-get-dimensions.md index 17f3bb68f7..41a899eac7 100644 --- a/docs/en/resources/tools/looker/looker-get-dimensions.md +++ b/docs/en/resources/tools/looker/looker-get-dimensions.md @@ -23,25 +23,25 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_dimensions: - kind: looker-get-dimensions - source: looker-source - description: | - This tool retrieves a list of dimensions defined within a specific Looker explore. - Dimensions are non-aggregatable attributes or characteristics of your data - (e.g., product name, order date, customer city) that can be used for grouping, - filtering, or segmenting query results. +kind: tools +name: get_dimensions +type: looker-get-dimensions +source: looker-source +description: | + This tool retrieves a list of dimensions defined within a specific Looker explore. + Dimensions are non-aggregatable attributes or characteristics of your data + (e.g., product name, order date, customer city) that can be used for grouping, + filtering, or segmenting query results. - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - Output Details: - - If a dimension includes a `suggestions` field, its contents are valid values - that can be used directly as filters for that dimension. - - If a `suggest_explore` and `suggest_dimension` are provided, you can query - that specified explore and dimension to retrieve a list of valid filter values. 
+ Output Details: + - If a dimension includes a `suggestions` field, its contents are valid values + that can be used directly as filters for that dimension. + - If a `suggest_explore` and `suggest_dimension` are provided, you can query + that specified explore and dimension to retrieve a list of valid filter values. ``` @@ -66,6 +66,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-dimensions". | +| type | string | true | Must be "looker-get-dimensions". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-explores.md b/docs/en/resources/tools/looker/looker-get-explores.md index d92942de9d..3b7a695d1b 100644 --- a/docs/en/resources/tools/looker/looker-get-explores.md +++ b/docs/en/resources/tools/looker/looker-get-explores.md @@ -35,24 +35,24 @@ The return type is an array of maps, each map is formatted like: ## Example ```yaml -tools: - get_explores: - kind: looker-get-explores - source: looker-source - description: | - This tool retrieves a list of explores defined within a specific LookML model. - Explores represent a curated view of your data, typically joining several - tables together to allow for focused analysis on a particular subject area. - The output provides details like the explore's `name` and `label`. +kind: tools +name: get_explores +type: looker-get-explores +source: looker-source +description: | + This tool retrieves a list of explores defined within a specific LookML model. + Explores represent a curated view of your data, typically joining several + tables together to allow for focused analysis on a particular subject area. 
+ The output provides details like the explore's `name` and `label`. - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-explores". | +| type | string | true | Must be "looker-get-explores". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-filters.md b/docs/en/resources/tools/looker/looker-get-filters.md index 2657936fd6..a455363de2 100644 --- a/docs/en/resources/tools/looker/looker-get-filters.md +++ b/docs/en/resources/tools/looker/looker-get-filters.md @@ -23,23 +23,23 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_filters: - kind: looker-get-filters - source: looker-source - description: | - This tool retrieves a list of "filter-only fields" defined within a specific - Looker explore. These are special fields defined in LookML specifically to - create user-facing filter controls that do not directly affect the `GROUP BY` - clause of the SQL query. They are often used in conjunction with liquid templating - to create dynamic queries. +kind: tools +name: get_filters +type: looker-get-filters +source: looker-source +description: | + This tool retrieves a list of "filter-only fields" defined within a specific + Looker explore. These are special fields defined in LookML specifically to + create user-facing filter controls that do not directly affect the `GROUP BY` + clause of the SQL query. They are often used in conjunction with liquid templating + to create dynamic queries. 
- Note: Regular dimensions and measures can also be used as filters in a query. - This tool *only* returns fields explicitly defined as `filter:` in LookML. + Note: Regular dimensions and measures can also be used as filters in a query. + This tool *only* returns fields explicitly defined as `filter:` in LookML. - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. ``` The response is a json array with the following elements: @@ -63,6 +63,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-filters". | +| type | string | true | Must be "looker-get-filters". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-looks.md b/docs/en/resources/tools/looker/looker-get-looks.md index 06bc5f7856..a15f5f31ba 100644 --- a/docs/en/resources/tools/looker/looker-get-looks.md +++ b/docs/en/resources/tools/looker/looker-get-looks.md @@ -29,37 +29,37 @@ default to 100 and 0. ## Example ```yaml -tools: - get_looks: - kind: looker-get-looks - source: looker-source - description: | - This tool searches for saved Looks (pre-defined queries and visualizations) - in a Looker instance. It returns a list of JSON objects, each representing a Look. 
+kind: tools +name: get_looks +type: looker-get-looks +source: looker-source +description: | + This tool searches for saved Looks (pre-defined queries and visualizations) + in a Looker instance. It returns a list of JSON objects, each representing a Look. - Search Parameters: - - title (optional): Filter by Look title (supports wildcards). - - folder_id (optional): Filter by the ID of the folder where the Look is saved. - - user_id (optional): Filter by the ID of the user who created the Look. - - description (optional): Filter by description content (supports wildcards). - - id (optional): Filter by specific Look ID. - - limit (optional): Maximum number of results to return. Defaults to a system limit. - - offset (optional): Starting point for pagination. + Search Parameters: + - title (optional): Filter by Look title (supports wildcards). + - folder_id (optional): Filter by the ID of the folder where the Look is saved. + - user_id (optional): Filter by the ID of the user who created the Look. + - description (optional): Filter by description content (supports wildcards). + - id (optional): Filter by specific Look ID. + - limit (optional): Maximum number of results to return. Defaults to a system limit. + - offset (optional): Starting point for pagination. - String Search Behavior: - - Case-insensitive matching. - - Supports SQL LIKE pattern match wildcards: - - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig") - - `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump") - - Special expressions for null checks: - - `"IS NULL"`: Matches Looks where the field is null. - - `"NOT NULL"`: Excludes Looks where the field is null. + String Search Behavior: + - Case-insensitive matching. + - Supports SQL LIKE pattern match wildcards: + - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig") + - `_`: Matches any single character. 
(e.g., `"D_m%"` matches "Damage", "dump") + - Special expressions for null checks: + - `"IS NULL"`: Matches Looks where the field is null. + - `"NOT NULL"`: Excludes Looks where the field is null. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-looks" | +| type | string | true | Must be "looker-get-looks" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-measures.md b/docs/en/resources/tools/looker/looker-get-measures.md index 7304031855..bcc390a19f 100644 --- a/docs/en/resources/tools/looker/looker-get-measures.md +++ b/docs/en/resources/tools/looker/looker-get-measures.md @@ -23,24 +23,24 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_measures: - kind: looker-get-measures - source: looker-source - description: | - This tool retrieves a list of measures defined within a specific Looker explore. - Measures are aggregatable metrics (e.g., total sales, average price, count of users) - that are used for calculations and quantitative analysis in your queries. +kind: tools +name: get_measures +type: looker-get-measures +source: looker-source +description: | + This tool retrieves a list of measures defined within a specific Looker explore. + Measures are aggregatable metrics (e.g., total sales, average price, count of users) + that are used for calculations and quantitative analysis in your queries. - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. 
+ - explore_name (required): The name of the explore within the model, obtained from `get_explores`. - Output Details: - - If a measure includes a `suggestions` field, its contents are valid values - that can be used directly as filters for that measure. - - If a `suggest_explore` and `suggest_dimension` are provided, you can query - that specified explore and dimension to retrieve a list of valid filter values. + Output Details: + - If a measure includes a `suggestions` field, its contents are valid values + that can be used directly as filters for that measure. + - If a `suggest_explore` and `suggest_dimension` are provided, you can query + that specified explore and dimension to retrieve a list of valid filter values. ``` @@ -65,6 +65,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-measures". | +| type | string | true | Must be "looker-get-measures". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-models.md b/docs/en/resources/tools/looker/looker-get-models.md index 81002cf3a2..dbaf456384 100644 --- a/docs/en/resources/tools/looker/looker-get-models.md +++ b/docs/en/resources/tools/looker/looker-get-models.md @@ -21,23 +21,23 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_models: - kind: looker-get-models - source: looker-source - description: | - This tool retrieves a list of available LookML models in the Looker instance. - LookML models define the data structure and relationships that users can query. - The output includes details like the model's `name` and `label`, which are - essential for subsequent calls to tools like `get_explores` or `query`. 
+kind: tools +name: get_models +type: looker-get-models +source: looker-source +description: | + This tool retrieves a list of available LookML models in the Looker instance. + LookML models define the data structure and relationships that users can query. + The output includes details like the model's `name` and `label`, which are + essential for subsequent calls to tools like `get_explores` or `query`. - This tool takes no parameters. + This tool takes no parameters. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-models". | +| type | string | true | Must be "looker-get-models". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-parameters.md b/docs/en/resources/tools/looker/looker-get-parameters.md index f40398568d..bc3413093b 100644 --- a/docs/en/resources/tools/looker/looker-get-parameters.md +++ b/docs/en/resources/tools/looker/looker-get-parameters.md @@ -23,20 +23,20 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_parameters: - kind: looker-get-parameters - source: looker-source - description: | - This tool retrieves a list of parameters defined within a specific Looker explore. - LookML parameters are dynamic input fields that allow users to influence query - behavior without directly modifying the underlying LookML. They are often used - with `liquid` templating to create flexible dashboards and reports, enabling - users to choose dimensions, measures, or other query components at runtime. +kind: tools +name: get_parameters +type: looker-get-parameters +source: looker-source +description: | + This tool retrieves a list of parameters defined within a specific Looker explore. 
+ LookML parameters are dynamic input fields that allow users to influence query + behavior without directly modifying the underlying LookML. They are often used + with `liquid` templating to create flexible dashboards and reports, enabling + users to choose dimensions, measures, or other query components at runtime. - Parameters: - - model_name (required): The name of the LookML model, obtained from `get_models`. - - explore_name (required): The name of the explore within the model, obtained from `get_explores`. + Parameters: + - model_name (required): The name of the LookML model, obtained from `get_models`. + - explore_name (required): The name of the explore within the model, obtained from `get_explores`. ``` The response is a json array with the following elements: @@ -60,6 +60,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-parameters". | +| type | string | true | Must be "looker-get-parameters". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-project-file.md b/docs/en/resources/tools/looker/looker-get-project-file.md index 440615efa4..83f23ce08e 100644 --- a/docs/en/resources/tools/looker/looker-get-project-file.md +++ b/docs/en/resources/tools/looker/looker-get-project-file.md @@ -21,26 +21,26 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_project_file: - kind: looker-get-project-file - source: looker-source - description: | - This tool retrieves the raw content of a specific LookML file from within a project. 
+kind: tools +name: get_project_file +type: looker-get-project-file +source: looker-source +description: | + This tool retrieves the raw content of a specific LookML file from within a project. - Parameters: - - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. - - file_path (required): The path to the LookML file within the project, - typically obtained from `get_project_files`. + Parameters: + - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. + - file_path (required): The path to the LookML file within the project, + typically obtained from `get_project_files`. - Output: - The raw text content of the specified LookML file. + Output: + The raw text content of the specified LookML file. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-project-file". | +| type | string | true | Must be "looker-get-project-file". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-project-files.md b/docs/en/resources/tools/looker/looker-get-project-files.md index 48ea273228..dc28e61e66 100644 --- a/docs/en/resources/tools/looker/looker-get-project-files.md +++ b/docs/en/resources/tools/looker/looker-get-project-files.md @@ -21,26 +21,26 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_project_files: - kind: looker-get-project-files - source: looker-source - description: | - This tool retrieves a list of all LookML files within a specified project, - providing details about each file. 
+kind: tools +name: get_project_files +type: looker-get-project-files +source: looker-source +description: | + This tool retrieves a list of all LookML files within a specified project, + providing details about each file. - Parameters: - - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. + Parameters: + - project_id (required): The unique ID of the LookML project, obtained from `get_projects`. - Output: - A JSON array of objects, each representing a LookML file and containing - details such as `path`, `id`, `type`, and `git_status`. + Output: + A JSON array of objects, each representing a LookML file and containing + details such as `path`, `id`, `type`, and `git_status`. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-project-files". | +| type | string | true | Must be "looker-get-project-files". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-get-projects.md b/docs/en/resources/tools/looker/looker-get-projects.md index 7c582eeae0..f6ad8def95 100644 --- a/docs/en/resources/tools/looker/looker-get-projects.md +++ b/docs/en/resources/tools/looker/looker-get-projects.md @@ -21,27 +21,27 @@ It's compatible with the following sources: ## Example ```yaml -tools: - get_projects: - kind: looker-get-projects - source: looker-source - description: | - This tool retrieves a list of all LookML projects available on the Looker instance. - It is useful for identifying projects before performing actions like retrieving - project files or making modifications. 
+kind: tools +name: get_projects +type: looker-get-projects +source: looker-source +description: | + This tool retrieves a list of all LookML projects available on the Looker instance. + It is useful for identifying projects before performing actions like retrieving + project files or making modifications. - Parameters: - This tool takes no parameters. + Parameters: + This tool takes no parameters. - Output: - A JSON array of objects, each containing the `project_id` and `project_name` - for a LookML project. + Output: + A JSON array of objects, each containing the `project_id` and `project_name` + for a LookML project. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-get-projects". | +| type | string | true | Must be "looker-get-projects". | | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-health-analyze.md b/docs/en/resources/tools/looker/looker-health-analyze.md index bc44d3f301..59b2575c44 100644 --- a/docs/en/resources/tools/looker/looker-health-analyze.md +++ b/docs/en/resources/tools/looker/looker-health-analyze.md @@ -37,29 +37,29 @@ instance. The `action` parameter selects the type of analysis to perform: ## Example ```yaml -tools: - health_analyze: - kind: looker-health-analyze - source: looker-source - description: | - This tool calculates the usage statistics for Looker projects, models, and explores. +kind: tools +name: health_analyze +type: looker-health-analyze +source: looker-source +description: | + This tool calculates the usage statistics for Looker projects, models, and explores. - Parameters: - - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`. 
- - project (optional): The specific project ID to analyze. - - model (optional): The specific model name to analyze. Requires `project` if used without `explore`. - - explore (optional): The specific explore name to analyze. Requires `model` if used. - - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days. - - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. + Parameters: + - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`. + - project (optional): The specific project ID to analyze. + - model (optional): The specific model name to analyze. Requires `project` if used without `explore`. + - explore (optional): The specific explore name to analyze. Requires `model` if used. + - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days. + - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. - Output: - The result is a JSON object containing usage metrics for the specified resources. + Output: + The result is a JSON object containing usage metrics for the specified resources. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-health-analyze" | +| type | string | true | Must be "looker-health-analyze" | | source | string | true | Looker source name | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/looker/looker-health-pulse.md b/docs/en/resources/tools/looker/looker-health-pulse.md index ccbc05be34..1ef1ab6e14 100644 --- a/docs/en/resources/tools/looker/looker-health-pulse.md +++ b/docs/en/resources/tools/looker/looker-health-pulse.md @@ -44,33 +44,33 @@ The `looker-health-pulse` tool performs health checks on a Looker instance. The ## Example ```yaml -tools: - health_pulse: - kind: looker-health-pulse - source: looker-source - description: | - This tool performs various health checks on a Looker instance. +kind: tools +name: health_pulse +type: looker-health-pulse +source: looker-source +description: | + This tool performs various health checks on a Looker instance. - Parameters: - - action (required): Specifies the type of health check to perform. - Choose one of the following: - - `check_db_connections`: Verifies database connectivity. - - `check_dashboard_performance`: Assesses dashboard loading performance. - - `check_dashboard_errors`: Identifies errors within dashboards. - - `check_explore_performance`: Evaluates explore query performance. - - `check_schedule_failures`: Reports on failed scheduled deliveries. - - `check_legacy_features`: Checks for the usage of legacy features. + Parameters: + - action (required): Specifies the type of health check to perform. + Choose one of the following: + - `check_db_connections`: Verifies database connectivity. + - `check_dashboard_performance`: Assesses dashboard loading performance. + - `check_dashboard_errors`: Identifies errors within dashboards. + - `check_explore_performance`: Evaluates explore query performance. + - `check_schedule_failures`: Reports on failed scheduled deliveries. + - `check_legacy_features`: Checks for the usage of legacy features. - Note on `check_legacy_features`: - This action is exclusively available in Looker Core instances. If invoked - on a non-Looker Core instance, it will return a notice rather than an error. 
- This notice should be considered normal behavior and not an indication of an issue. + Note on `check_legacy_features`: + This action is exclusively available in Looker Core instances. If invoked + on a non-Looker Core instance, it will return a notice rather than an error. + This notice should be considered normal behavior and not an indication of an issue. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-health-pulse" | +| type | string | true | Must be "looker-health-pulse" | | source | string | true | Looker source name | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-health-vacuum.md b/docs/en/resources/tools/looker/looker-health-vacuum.md index f4d635ccc5..dc9ec7545c 100644 --- a/docs/en/resources/tools/looker/looker-health-vacuum.md +++ b/docs/en/resources/tools/looker/looker-health-vacuum.md @@ -34,28 +34,28 @@ Identify unnused fields (*in this case, less than 1 query in the last 20 days*) and joins in the `order_items` explore and `thelook` model ```yaml -tools: - health_vacuum: - kind: looker-health-vacuum - source: looker-source - description: | - This tool identifies and suggests LookML models or explores that can be - safely removed due to inactivity or low usage. +kind: tools +name: health_vacuum +type: looker-health-vacuum +source: looker-source +description: | + This tool identifies and suggests LookML models or explores that can be + safely removed due to inactivity or low usage. - Parameters: - - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`. - - project (optional): The specific project ID to consider. - - model (optional): The specific model name to consider. Requires `project` if used without `explore`. 
- - explore (optional): The specific explore name to consider. Requires `model` if used. - - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days. - - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. + Parameters: + - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`. + - project (optional): The specific project ID to consider. + - model (optional): The specific model name to consider. Requires `project` if used without `explore`. + - explore (optional): The specific explore name to consider. Requires `model` if used. + - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days. + - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`. - Output: - A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage. + Output: + A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage. ``` | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-health-vacuum" | +| type | string | true | Must be "looker-health-vacuum" | | source | string | true | Looker source name | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/looker/looker-make-dashboard.md b/docs/en/resources/tools/looker/looker-make-dashboard.md index f8112bcd5d..3e7dc03dda 100644 --- a/docs/en/resources/tools/looker/looker-make-dashboard.md +++ b/docs/en/resources/tools/looker/looker-make-dashboard.md @@ -27,30 +27,30 @@ It's compatible with the following sources: ## Example ```yaml -tools: - make_dashboard: - kind: looker-make-dashboard - source: looker-source - description: | - This tool creates a new, empty dashboard in Looker. Dashboards are stored - in the user's personal folder, and the dashboard name must be unique. - After creation, use `add_dashboard_filter` to add filters and - `add_dashboard_element` to add content tiles. +kind: tools +name: make_dashboard +type: looker-make-dashboard +source: looker-source +description: | + This tool creates a new, empty dashboard in Looker. Dashboards are stored + in the user's personal folder, and the dashboard name must be unique. + After creation, use `add_dashboard_filter` to add filters and + `add_dashboard_element` to add content tiles. - Required Parameters: - - title (required): A unique title for the new dashboard. - - description (required): A brief description of the dashboard's purpose. + Required Parameters: + - title (required): A unique title for the new dashboard. + - description (required): A brief description of the dashboard's purpose. - Output: - A JSON object containing a link (`url`) to the newly created dashboard and - its unique `id`. This `dashboard_id` is crucial for subsequent calls to - `add_dashboard_filter` and `add_dashboard_element`. + Output: + A JSON object containing a link (`url`) to the newly created dashboard and + its unique `id`. This `dashboard_id` is crucial for subsequent calls to + `add_dashboard_filter` and `add_dashboard_element`. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-make-dashboard" | +| type | string | true | Must be "looker-make-dashboard" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-make-look.md b/docs/en/resources/tools/looker/looker-make-look.md index 9c69898437..58e8b09e0d 100644 --- a/docs/en/resources/tools/looker/looker-make-look.md +++ b/docs/en/resources/tools/looker/looker-make-look.md @@ -36,35 +36,35 @@ It's compatible with the following sources: ## Example ```yaml -tools: - make_look: - kind: looker-make-look - source: looker-source - description: | - This tool creates a new Look (saved query with visualization) in Looker. - The Look will be saved in the user's personal folder, and its name must be unique. +kind: tools +name: make_look +type: looker-make-look +source: looker-source +description: | + This tool creates a new Look (saved query with visualization) in Looker. + The Look will be saved in the user's personal folder, and its name must be unique. - Required Parameters: - - title: A unique title for the new Look. - - description: A brief description of the Look's purpose. - - model_name: The name of the LookML model (from `get_models`). - - explore_name: The name of the explore (from `get_explores`). - - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. + Required Parameters: + - title: A unique title for the new Look. + - description: A brief description of the Look's purpose. + - model_name: The name of the LookML model (from `get_models`). + - explore_name: The name of the explore (from `get_explores`). 
+ - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. - Optional Parameters: - - pivots, filters, sorts, limit, query_timezone: These parameters are identical - to those described for the `query` tool. - - vis_config: A JSON object defining the visualization settings for the Look. - The structure and options are the same as for the `query_url` tool's `vis_config`. + Optional Parameters: + - pivots, filters, sorts, limit, query_timezone: These parameters are identical + to those described for the `query` tool. + - vis_config: A JSON object defining the visualization settings for the Look. + The structure and options are the same as for the `query_url` tool's `vis_config`. - Output: - A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`. + Output: + A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-make-look" | +| type | string | true | Must be "looker-make-look" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-query-sql.md b/docs/en/resources/tools/looker/looker-query-sql.md index 064464ea5d..ca61df1007 100644 --- a/docs/en/resources/tools/looker/looker-query-sql.md +++ b/docs/en/resources/tools/looker/looker-query-sql.md @@ -36,28 +36,28 @@ to find MCP Toolbox queries. ## Example ```yaml -tools: - query_sql: - kind: looker-query-sql - source: looker-source - description: | - This tool generates the underlying SQL query that Looker would execute - against the database for a given set of parameters. 
It is useful for - understanding how Looker translates a request into SQL. +kind: tools +name: query_sql +type: looker-query-sql +source: looker-source +description: | + This tool generates the underlying SQL query that Looker would execute + against the database for a given set of parameters. It is useful for + understanding how Looker translates a request into SQL. - Parameters: - All parameters for this tool are identical to those of the `query` tool. - This includes `model_name`, `explore_name`, `fields` (required), - and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`. + Parameters: + All parameters for this tool are identical to those of the `query` tool. + This includes `model_name`, `explore_name`, `fields` (required), + and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`. - Output: - The result of this tool is the raw SQL text. + Output: + The result of this tool is the raw SQL text. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-query-sql" | +| type | string | true | Must be "looker-query-sql" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-query-url.md b/docs/en/resources/tools/looker/looker-query-url.md index af1f138509..b883a9c5d1 100644 --- a/docs/en/resources/tools/looker/looker-query-url.md +++ b/docs/en/resources/tools/looker/looker-query-url.md @@ -32,465 +32,465 @@ It's compatible with the following sources: ## Example ```yaml -tools: - query_url: - kind: looker-query-url - source: looker-source - description: | - This tool generates a shareable URL for a Looker query, allowing users to - explore the query further within the Looker UI. 
It returns the generated URL, - along with the `query_id` and `slug`. +kind: tools +name: query_url +type: looker-query-url +source: looker-source +description: | + This tool generates a shareable URL for a Looker query, allowing users to + explore the query further within the Looker UI. It returns the generated URL, + along with the `query_id` and `slug`. - Parameters: - All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`, - `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool. + Parameters: + All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`, + `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool. - Additionally, it accepts an optional `vis_config` parameter: - - vis_config (optional): A JSON object that controls the default visualization - settings for the generated query. + Additionally, it accepts an optional `vis_config` parameter: + - vis_config (optional): A JSON object that controls the default visualization + settings for the generated query. - vis_config Details: - The `vis_config` object supports a wide range of properties for various chart types. - Here are some notes on making visualizations. + vis_config Details: + The `vis_config` object supports a wide range of properties for various chart types. + Here are some notes on making visualizations. - ### Cartesian Charts (Area, Bar, Column, Line, Scatter) + ### Cartesian Charts (Area, Bar, Column, Line, Scatter) - These chart types share a large number of configuration options. + These chart types share a large number of configuration options. - **General** - * `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`). - * `series_types`: Override the chart type for individual series. - * `show_view_names`: Display view names in labels and tooltips (`true`/`false`). - * `series_labels`: Provide custom names for series. 
+ **General** + * `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`). + * `series_types`: Override the chart type for individual series. + * `show_view_names`: Display view names in labels and tooltips (`true`/`false`). + * `series_labels`: Provide custom names for series. - **Styling & Colors** - * `colors`: An array of color values to be used for the chart series. - * `series_colors`: A mapping of series names to specific color values. - * `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.). - * `font_size`: Font size for labels (e.g., '12px'). + **Styling & Colors** + * `colors`: An array of color values to be used for the chart series. + * `series_colors`: A mapping of series names to specific color values. + * `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.). + * `font_size`: Font size for labels (e.g., '12px'). - **Legend** - * `hide_legend`: Show or hide the chart legend (`true`/`false`). - * `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`). + **Legend** + * `hide_legend`: Show or hide the chart legend (`true`/`false`). + * `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`). - **Axes** - * `swap_axes`: Swap the X and Y axes (`true`/`false`). - * `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`). - * `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`). - * `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`). - * `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`). - * `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`). - * `x_axis_label`, `y_axis_label`: Set a custom title for an axis. 
- * `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`). - * `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis. - * `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels. - * `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`). - * `y_axes`: An array of configuration objects for multiple y-axes. + **Axes** + * `swap_axes`: Swap the X and Y axes (`true`/`false`). + * `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`). + * `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`). + * `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`). + * `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`). + * `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`). + * `x_axis_label`, `y_axis_label`: Set a custom title for an axis. + * `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`). + * `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis. + * `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels. + * `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`). + * `y_axes`: An array of configuration objects for multiple y-axes. - **Data & Series** - * `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`). - * `ordering`: Order of series in a stack (`'none'`, etc.). - * `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`). - * `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`). - * `discontinuous_nulls`: How to render null values in line charts (`true`/`false`). 
- * `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`). - * `series_point_styles`: Override point styles for individual series. - * `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.). - * `show_value_labels`: Display values on data points (`true`/`false`). - * `label_value_format`: A format string for value labels. - * `show_totals_labels`: Display total labels on stacked charts (`true`/`false`). - * `totals_color`: Color for total labels. - * `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`). - * `hidden_series`: An array of series names to hide from the visualization. + **Data & Series** + * `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`). + * `ordering`: Order of series in a stack (`'none'`, etc.). + * `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`). + * `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`). + * `discontinuous_nulls`: How to render null values in line charts (`true`/`false`). + * `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`). + * `series_point_styles`: Override point styles for individual series. + * `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.). + * `show_value_labels`: Display values on data points (`true`/`false`). + * `label_value_format`: A format string for value labels. + * `show_totals_labels`: Display total labels on stacked charts (`true`/`false`). + * `totals_color`: Color for total labels. + * `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`). + * `hidden_series`: An array of series names to hide from the visualization. - **Scatter/Bubble Specific** - * `size_by_field`: The field used to determine the size of bubbles. 
- * `color_by_field`: The field used to determine the color of bubbles. - * `plot_size_by_field`: Whether to display the size-by field in the legend. - * `cluster_points`: Group nearby points into clusters (`true`/`false`). - * `quadrants_enabled`: Display quadrants on the chart (`true`/`false`). - * `quadrant_properties`: Configuration for quadrant labels and colors. - * `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage. - * `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value. + **Scatter/Bubble Specific** + * `size_by_field`: The field used to determine the size of bubbles. + * `color_by_field`: The field used to determine the color of bubbles. + * `plot_size_by_field`: Whether to display the size-by field in the legend. + * `cluster_points`: Group nearby points into clusters (`true`/`false`). + * `quadrants_enabled`: Display quadrants on the chart (`true`/`false`). + * `quadrant_properties`: Configuration for quadrant labels and colors. + * `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage. + * `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value. - **Miscellaneous** - * `reference_lines`: Configuration for displaying reference lines. - * `trend_lines`: Configuration for displaying trend lines. - * `trellis`: Configuration for creating trellis (small multiple) charts. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions. + **Miscellaneous** + * `reference_lines`: Configuration for displaying reference lines. + * `trend_lines`: Configuration for displaying trend lines. + * `trellis`: Configuration for creating trellis (small multiple) charts. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions. - ### Boxplot + ### Boxplot - * Inherits most of the Cartesian chart options. - * `type`: Must be `looker_boxplot`. 
+ * Inherits most of the Cartesian chart options. + * `type`: Must be `looker_boxplot`. - ### Funnel + ### Funnel - * `type`: Must be `looker_funnel`. - * `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`). - * `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`). - * `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`). - * `labelColor`, `labelColorEnabled`: Set a custom color for labels. - * `labelOverlap`: Allow labels to overlap (`true`/`false`). - * `barColors`: An array of colors for the funnel steps. - * `color_application`: Advanced color palette controls. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + * `type`: Must be `looker_funnel`. + * `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`). + * `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`). + * `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`). + * `labelColor`, `labelColorEnabled`: Set a custom color for labels. + * `labelOverlap`: Allow labels to overlap (`true`/`false`). + * `barColors`: An array of colors for the funnel steps. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. - ### Pie / Donut + ### Pie / Donut - * `type`: Must be `looker_pie`. - * `value_labels`: Where to display values (`'legend'`, `'labels'`). - * `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`). - * `start_angle`, `end_angle`: The start and end angles of the pie chart. - * `inner_radius`: The inner radius, used to create a donut chart. - * `series_colors`, `series_labels`: Override colors and labels for specific slices. - * `color_application`: Advanced color palette controls. 
- * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. - * `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration. + * `type`: Must be `looker_pie`. + * `value_labels`: Where to display values (`'legend'`, `'labels'`). + * `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`). + * `start_angle`, `end_angle`: The start and end angles of the pie chart. + * `inner_radius`: The inner radius, used to create a donut chart. + * `series_colors`, `series_labels`: Override colors and labels for specific slices. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + * `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration. - ### Waterfall + ### Waterfall - * Inherits most of the Cartesian chart options. - * `type`: Must be `looker_waterfall`. - * `up_color`: Color for positive (increasing) values. - * `down_color`: Color for negative (decreasing) values. - * `total_color`: Color for the total bar. + * Inherits most of the Cartesian chart options. + * `type`: Must be `looker_waterfall`. + * `up_color`: Color for positive (increasing) values. + * `down_color`: Color for negative (decreasing) values. + * `total_color`: Color for the total bar. - ### Word Cloud + ### Word Cloud - * `type`: Must be `looker_wordcloud`. - * `rotation`: Enable random word rotation (`true`/`false`). - * `colors`: An array of colors for the words. - * `color_application`: Advanced color palette controls. - * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. + * `type`: Must be `looker_wordcloud`. + * `rotation`: Enable random word rotation (`true`/`false`). + * `colors`: An array of colors for the words. + * `color_application`: Advanced color palette controls. + * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. 
- These are some sample vis_config settings. + These are some sample vis_config settings. - A bar chart - - {{ - "defaults_version": 1, - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "ordering": "none", - "plot_size_by_field": false, - "point_style": "none", - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_bar", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} + A bar chart - + {{ + "defaults_version": 1, + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + "trellis": "", + "type": "looker_bar", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} - A column chart with an option advanced_vis_config - - {{ - "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, 
spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }", - "colors": [ - "grey" - ], - "defaults_version": 1, - "hidden_fields": [], - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "note_display": "below", - "note_state": "collapsed", - "note_text": "Unsold inventory only", - "ordering": "none", - "plot_size_by_field": false, - "point_style": "none", - "series_colors": {}, - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": true, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_column", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axes": [], - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} + A column chart with an option advanced_vis_config - + {{ + "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }", + "colors": [ + "grey" + ], + "defaults_version": 1, + "hidden_fields": [], + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "note_display": "below", + 
"note_state": "collapsed", + "note_text": "Unsold inventory only", + "ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "series_colors": {}, + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": true, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + "trellis": "", + "type": "looker_column", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axes": [], + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} - A line chart - - {{ - "defaults_version": 1, - "hidden_pivots": {}, - "hidden_series": [], - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_points": true, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "", - "trellis": "", - "type": "looker_line", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5 - }} + A line chart - + {{ + "defaults_version": 1, + "hidden_pivots": {}, + "hidden_series": [], + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_points": 
true, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "", + "trellis": "", + "type": "looker_line", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5 + }} - An area chart - - {{ - "defaults_version": 1, - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_points": true, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "normal", - "totals_color": "#808080", - "trellis": "", - "type": "looker_area", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} + An area chart - + {{ + "defaults_version": 1, + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_points": true, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "normal", + "totals_color": "#808080", + 
"trellis": "", + "type": "looker_area", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} - A scatter plot - - {{ - "cluster_points": false, - "custom_quadrant_point_x": 5, - "custom_quadrant_point_y": 5, - "custom_value_label_column": "", - "custom_x_column": "", - "custom_y_column": "", - "defaults_version": 1, - "hidden_fields": [], - "hidden_pivots": {}, - "hidden_points_if_no": [], - "hidden_series": [], - "interpolation": "linear", - "label_density": 25, - "legend_position": "center", - "limit_displayed_rows": false, - "limit_displayed_rows_values": { - "first_last": "first", - "num_rows": 0, - "show_hide": "hide" - }, - "plot_size_by_field": false, - "point_style": "circle", - "quadrant_properties": { - "0": { - "color": "", - "label": "Quadrant 1" - }, - "1": { - "color": "", - "label": "Quadrant 2" - }, - "2": { - "color": "", - "label": "Quadrant 3" - }, - "3": { - "color": "", - "label": "Quadrant 4" - } - }, - "quadrants_enabled": false, - "series_labels": {}, - "series_types": {}, - "show_null_points": false, - "show_value_labels": false, - "show_view_names": true, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "size_by_field": "roi", - "stacking": "normal", - "swap_axes": true, - "trellis": "", - "type": "looker_scatter", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "x_axis_zoom": true, - "y_axes": [ - { - "label": "", - "orientation": "bottom", - "series": [ - { - "axisId": "Channel_0 - average_of_roi_first", - "id": "Channel_0 - average_of_roi_first", - "name": "Channel_0" - }, - { - "axisId": "Channel_1 - average_of_roi_first", - "id": "Channel_1 - average_of_roi_first", - 
"name": "Channel_1" - }, - { - "axisId": "Channel_2 - average_of_roi_first", - "id": "Channel_2 - average_of_roi_first", - "name": "Channel_2" - }, - { - "axisId": "Channel_3 - average_of_roi_first", - "id": "Channel_3 - average_of_roi_first", - "name": "Channel_3" - }, - { - "axisId": "Channel_4 - average_of_roi_first", - "id": "Channel_4 - average_of_roi_first", - "name": "Channel_4" - } - ], - "showLabels": true, - "showValues": true, - "tickDensity": "custom", - "tickDensityCustom": 100, - "type": "linear", - "unpinAxis": false - } - ], - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5, - "y_axis_zoom": true - }} + A scatter plot - + {{ + "cluster_points": false, + "custom_quadrant_point_x": 5, + "custom_quadrant_point_y": 5, + "custom_value_label_column": "", + "custom_x_column": "", + "custom_y_column": "", + "defaults_version": 1, + "hidden_fields": [], + "hidden_pivots": {}, + "hidden_points_if_no": [], + "hidden_series": [], + "interpolation": "linear", + "label_density": 25, + "legend_position": "center", + "limit_displayed_rows": false, + "limit_displayed_rows_values": { + "first_last": "first", + "num_rows": 0, + "show_hide": "hide" + }, + "plot_size_by_field": false, + "point_style": "circle", + "quadrant_properties": { + "0": { + "color": "", + "label": "Quadrant 1" + }, + "1": { + "color": "", + "label": "Quadrant 2" + }, + "2": { + "color": "", + "label": "Quadrant 3" + }, + "3": { + "color": "", + "label": "Quadrant 4" + } + }, + "quadrants_enabled": false, + "series_labels": {}, + "series_types": {}, + "show_null_points": false, + "show_value_labels": false, + "show_view_names": true, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "size_by_field": "roi", + "stacking": "normal", + "swap_axes": true, + "trellis": "", + "type": "looker_scatter", 
+ "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "x_axis_zoom": true, + "y_axes": [ + { + "label": "", + "orientation": "bottom", + "series": [ + { + "axisId": "Channel_0 - average_of_roi_first", + "id": "Channel_0 - average_of_roi_first", + "name": "Channel_0" + }, + { + "axisId": "Channel_1 - average_of_roi_first", + "id": "Channel_1 - average_of_roi_first", + "name": "Channel_1" + }, + { + "axisId": "Channel_2 - average_of_roi_first", + "id": "Channel_2 - average_of_roi_first", + "name": "Channel_2" + }, + { + "axisId": "Channel_3 - average_of_roi_first", + "id": "Channel_3 - average_of_roi_first", + "name": "Channel_3" + }, + { + "axisId": "Channel_4 - average_of_roi_first", + "id": "Channel_4 - average_of_roi_first", + "name": "Channel_4" + } + ], + "showLabels": true, + "showValues": true, + "tickDensity": "custom", + "tickDensityCustom": 100, + "type": "linear", + "unpinAxis": false + } + ], + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5, + "y_axis_zoom": true + }} - A single record visualization - - {{ - "defaults_version": 1, - "show_view_names": false, - "type": "looker_single_record" - }} + A single record visualization - + {{ + "defaults_version": 1, + "show_view_names": false, + "type": "looker_single_record" + }} - A single value visualization - - {{ - "comparison_reverse_colors": false, - "comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false, - "custom_color": "#1A73E8", - "custom_color_enabled": true, - "defaults_version": 1, - "enable_conditional_formatting": false, - "series_types": {}, - "show_comparison": false, - "show_comparison_label": true, - "show_single_value_title": true, - "single_value_title": "Total Clicks", - "type": "single_value" - }} + A single value visualization - + {{ + 
"comparison_reverse_colors": false, + "comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false, + "custom_color": "#1A73E8", + "custom_color_enabled": true, + "defaults_version": 1, + "enable_conditional_formatting": false, + "series_types": {}, + "show_comparison": false, + "show_comparison_label": true, + "show_single_value_title": true, + "single_value_title": "Total Clicks", + "type": "single_value" + }} - A Pie chart - - {{ - "defaults_version": 1, - "label_density": 25, - "label_type": "labPer", - "legend_position": "center", - "limit_displayed_rows": false, - "ordering": "none", - "plot_size_by_field": false, - "point_style": "none", - "series_types": {}, - "show_null_labels": false, - "show_silhouette": false, - "show_totals_labels": false, - "show_value_labels": false, - "show_view_names": false, - "show_x_axis_label": true, - "show_x_axis_ticks": true, - "show_y_axis_labels": true, - "show_y_axis_ticks": true, - "stacking": "", - "totals_color": "#808080", - "trellis": "", - "type": "looker_pie", - "value_labels": "legend", - "x_axis_gridlines": false, - "x_axis_reversed": false, - "x_axis_scale": "auto", - "y_axis_combined": true, - "y_axis_gridlines": true, - "y_axis_reversed": false, - "y_axis_scale_mode": "linear", - "y_axis_tick_density": "default", - "y_axis_tick_density_custom": 5 - }} + A Pie chart - + {{ + "defaults_version": 1, + "label_density": 25, + "label_type": "labPer", + "legend_position": "center", + "limit_displayed_rows": false, + "ordering": "none", + "plot_size_by_field": false, + "point_style": "none", + "series_types": {}, + "show_null_labels": false, + "show_silhouette": false, + "show_totals_labels": false, + "show_value_labels": false, + "show_view_names": false, + "show_x_axis_label": true, + "show_x_axis_ticks": true, + "show_y_axis_labels": true, + "show_y_axis_ticks": true, + "stacking": "", + "totals_color": "#808080", + "trellis": "", + "type": "looker_pie", 
+ "value_labels": "legend", + "x_axis_gridlines": false, + "x_axis_reversed": false, + "x_axis_scale": "auto", + "y_axis_combined": true, + "y_axis_gridlines": true, + "y_axis_reversed": false, + "y_axis_scale_mode": "linear", + "y_axis_tick_density": "default", + "y_axis_tick_density_custom": 5 + }} - The result is a JSON object with the id, slug, the url, and - the long_url. + The result is a JSON object with the id, slug, the url, and + the long_url. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-query-url" | +| type | string | true | Must be "looker-query-url" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-query.md b/docs/en/resources/tools/looker/looker-query.md index 7ba3292763..6fe10c0a2e 100644 --- a/docs/en/resources/tools/looker/looker-query.md +++ b/docs/en/resources/tools/looker/looker-query.md @@ -36,37 +36,37 @@ to find MCP Toolbox queries. ## Example ```yaml -tools: - query: - kind: looker-query - source: looker-source - description: | - This tool runs a query against a LookML model and returns the results in JSON format. +kind: tools +name: query +type: looker-query +source: looker-source +description: | + This tool runs a query against a LookML model and returns the results in JSON format. - Required Parameters: - - model_name: The name of the LookML model (from `get_models`). - - explore_name: The name of the explore (from `get_explores`). - - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. + Required Parameters: + - model_name: The name of the LookML model (from `get_models`). + - explore_name: The name of the explore (from `get_explores`). 
+ - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query. - Optional Parameters: - - pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list. - - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`. - - Do not quote field names. - - Use `not null` instead of `-NULL`. - - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'"). - - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`). - - limit: Row limit (default 500). Use "-1" for unlimited. - - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`). + Optional Parameters: + - pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list. + - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`. + - Do not quote field names. + - Use `not null` instead of `-NULL`. + - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'"). + - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`). + - limit: Row limit (default 500). Use "-1" for unlimited. + - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`). - Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields. + Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields. 
- The result of the query tool is JSON + The result of the query tool is JSON ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-query" | +| type | string | true | Must be "looker-query" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-run-dashboard.md b/docs/en/resources/tools/looker/looker-run-dashboard.md index cc2c2072df..2682977b49 100644 --- a/docs/en/resources/tools/looker/looker-run-dashboard.md +++ b/docs/en/resources/tools/looker/looker-run-dashboard.md @@ -22,26 +22,26 @@ It's compatible with the following sources: ## Example ```yaml -tools: - run_dashboard: - kind: looker-run-dashboard - source: looker-source - description: | - This tool executes the queries associated with each tile in a specified dashboard - and returns the aggregated data in a JSON structure. +kind: tools +name: run_dashboard +type: looker-run-dashboard +source: looker-source +description: | + This tool executes the queries associated with each tile in a specified dashboard + and returns the aggregated data in a JSON structure. - Parameters: - - dashboard_id (required): The unique identifier of the dashboard to run, - typically obtained from the `get_dashboards` tool. + Parameters: + - dashboard_id (required): The unique identifier of the dashboard to run, + typically obtained from the `get_dashboards` tool. - Output: - The data from all dashboard tiles is returned as a JSON object. + Output: + The data from all dashboard tiles is returned as a JSON object. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-run-dashboard" | +| type | string | true | Must be "looker-run-dashboard" | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-run-look.md b/docs/en/resources/tools/looker/looker-run-look.md index eb2f57eedb..f2a2989323 100644 --- a/docs/en/resources/tools/looker/looker-run-look.md +++ b/docs/en/resources/tools/looker/looker-run-look.md @@ -22,26 +22,26 @@ It's compatible with the following sources: ## Example ```yaml -tools: - run_look: - kind: looker-run-look - source: looker-source - description: | - This tool executes the query associated with a saved Look and - returns the resulting data in a JSON structure. +kind: tools +name: run_look +type: looker-run-look +source: looker-source +description: | + This tool executes the query associated with a saved Look and + returns the resulting data in a JSON structure. - Parameters: - - look_id (required): The unique identifier of the Look to run, - typically obtained from the `get_looks` tool. + Parameters: + - look_id (required): The unique identifier of the Look to run, + typically obtained from the `get_looks` tool. - Output: - The query results are returned as a JSON object. + Output: + The query results are returned as a JSON object. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-run-look" | +| type | string | true | Must be "looker-run-look" | | source | string | true | Name of the source the SQL should execute on. 
| | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-update-project-file.md b/docs/en/resources/tools/looker/looker-update-project-file.md index af8cabd81b..de4b092f1c 100644 --- a/docs/en/resources/tools/looker/looker-update-project-file.md +++ b/docs/en/resources/tools/looker/looker-update-project-file.md @@ -22,28 +22,28 @@ as well as the new file content. ## Example ```yaml -tools: - update_project_file: - kind: looker-update-project-file - source: looker-source - description: | - This tool modifies the content of an existing LookML file within a specified project. +kind: tools +name: update_project_file +type: looker-update-project-file +source: looker-source +description: | + This tool modifies the content of an existing LookML file within a specified project. - Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. - Parameters: - - project_id (required): The unique ID of the LookML project. - - file_path (required): The exact path to the LookML file to modify within the project. - - content (required): The new, complete LookML content to overwrite the existing file. + Parameters: + - project_id (required): The unique ID of the LookML project. + - file_path (required): The exact path to the LookML file to modify within the project. + - content (required): The new, complete LookML content to overwrite the existing file. - Output: - A confirmation message upon successful file modification. + Output: + A confirmation message upon successful file modification. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "looker-update-project-file". | +| type | string | true | Must be "looker-update-project-file". 
| | source | string | true | Name of the source Looker instance. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/looker/looker-validate-project.md b/docs/en/resources/tools/looker/looker-validate-project.md new file mode 100644 index 0000000000..956588b11d --- /dev/null +++ b/docs/en/resources/tools/looker/looker-validate-project.md @@ -0,0 +1,47 @@ +--- +title: "looker-validate-project" +type: docs +weight: 1 +description: > + A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors +aliases: +- /resources/tools/looker-validate-project +--- + +## About + +A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors + +It's compatible with the following sources: + +- [looker](../../sources/looker.md) + +`looker-validate-project` accepts a project_id parameter. + +## Example + +```yaml +tools: + validate_project: + kind: looker-validate-project + source: looker-source + description: | + This tool checks a LookML project for syntax errors. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + + Output: + A list of error details including the file path and line number, and also a list of models + that are not currently valid due to LookML errors. +``` + +## Reference + +| **field** | **type** | **required** | **description** | +|-------------|:--------:|:------------:|----------------------------------------------------| +| kind | string | true | Must be "looker-validate-project". | +| source | string | true | Name of the source Looker instance. | +| description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/mindsdb/_index.md b/docs/en/resources/tools/mindsdb/_index.md index 1a2f2cdff1..89d7828adb 100644 --- a/docs/en/resources/tools/mindsdb/_index.md +++ b/docs/en/resources/tools/mindsdb/_index.md @@ -117,20 +117,20 @@ federated database capabilities. Here's a complete working configuration that has been tested: ```yaml -sources: - my-pg-source: - kind: mindsdb - host: 127.0.0.1 - port: 47335 - database: files - user: mindsdb - -tools: - mindsdb-execute-sql: - kind: mindsdb-execute-sql - source: my-pg-source - description: | - Execute SQL queries directly on MindsDB database. - Use this tool to run any SQL statement against your MindsDB instance. - Example: SELECT * FROM my_table LIMIT 10 +kind: sources +name: my-pg-source +type: mindsdb +host: 127.0.0.1 +port: 47335 +database: files +user: mindsdb +--- +kind: tools +name: mindsdb-execute-sql +type: mindsdb-execute-sql +source: my-pg-source +description: | + Execute SQL queries directly on MindsDB database. + Use this tool to run any SQL statement against your MindsDB instance. + Example: SELECT * FROM my_table LIMIT 10 ``` diff --git a/docs/en/resources/tools/mindsdb/mindsdb-execute-sql.md b/docs/en/resources/tools/mindsdb/mindsdb-execute-sql.md index 1228caf837..7b6da914d5 100644 --- a/docs/en/resources/tools/mindsdb/mindsdb-execute-sql.md +++ b/docs/en/resources/tools/mindsdb/mindsdb-execute-sql.md @@ -97,11 +97,11 @@ ORDER BY created_at DESC; ## Example ```yaml -tools: - execute_sql_tool: - kind: mindsdb-execute-sql - source: my-mindsdb-instance - description: Use this tool to execute SQL statements across multiple datasources and ML models. +kind: tools +name: execute_sql_tool +type: mindsdb-execute-sql +source: my-mindsdb-instance +description: Use this tool to execute SQL statements across multiple datasources and ML models. 
``` ### Working Configuration Example @@ -109,28 +109,28 @@ tools: Here's a working configuration that has been tested: ```yaml -sources: - my-pg-source: - kind: mindsdb - host: 127.0.0.1 - port: 47335 - database: files - user: mindsdb - -tools: - mindsdb-execute-sql: - kind: mindsdb-execute-sql - source: my-pg-source - description: | - Execute SQL queries directly on MindsDB database. - Use this tool to run any SQL statement against your MindsDB instance. - Example: SELECT * FROM my_table LIMIT 10 +kind: sources +name: my-pg-source +type: mindsdb +host: 127.0.0.1 +port: 47335 +database: files +user: mindsdb +--- +kind: tools +name: mindsdb-execute-sql +type: mindsdb-execute-sql +source: my-pg-source +description: | + Execute SQL queries directly on MindsDB database. + Use this tool to run any SQL statement against your MindsDB instance. + Example: SELECT * FROM my_table LIMIT 10 ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "mindsdb-execute-sql". | +| type | string | true | Must be "mindsdb-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mindsdb/mindsdb-sql.md b/docs/en/resources/tools/mindsdb/mindsdb-sql.md index b0cfc189dc..44bea0d27c 100644 --- a/docs/en/resources/tools/mindsdb/mindsdb-sql.md +++ b/docs/en/resources/tools/mindsdb/mindsdb-sql.md @@ -97,41 +97,41 @@ ORDER BY created_at DESC; > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: mindsdb-sql - source: my-mindsdb-instance - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. 
- Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: mindsdb-sql +source: my-mindsdb-instance +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -143,29 +143,29 @@ tools: > [templateParameters](../#template-parameters). ```yaml -tools: - list_table: - kind: mindsdb-sql - source: my-mindsdb-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: mindsdb-sql +source: my-mindsdb-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mindsdb-sql". | +| type | string | true | Must be "mindsdb-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. 
| diff --git a/docs/en/resources/tools/mongodb/mongodb-aggregate.md b/docs/en/resources/tools/mongodb/mongodb-aggregate.md index e37217ec77..43707e2a4e 100644 --- a/docs/en/resources/tools/mongodb/mongodb-aggregate.md +++ b/docs/en/resources/tools/mongodb/mongodb-aggregate.md @@ -22,7 +22,7 @@ array of documents produced by the final stage of the pipeline. A `readOnly` flag can be set to `true` as a safety measure to ensure the pipeline does not contain any write stages (like `$out` or `$merge`). -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -32,45 +32,45 @@ Here is an example that calculates the average price and total count of products for each category, but only for products with an "active" status. ```yaml -tools: - get_category_stats: - kind: mongodb-aggregate - source: my-mongo-source - description: Calculates average price and count of products, grouped by category. - database: ecommerce - collection: products - readOnly: true - pipelinePayload: | - [ - { - "$match": { - "status": {{json .status_filter}} - } - }, - { - "$group": { - "_id": "$category", - "average_price": { "$avg": "$price" }, - "item_count": { "$sum": 1 } - } - }, - { - "$sort": { - "average_price": -1 - } - } - ] - pipelineParams: - - name: status_filter - type: string - description: The product status to filter by (e.g., "active"). +kind: tools +name: get_category_stats +type: mongodb-aggregate +source: my-mongo-source +description: Calculates average price and count of products, grouped by category. 
+database: ecommerce +collection: products +readOnly: true +pipelinePayload: | + [ + { + "$match": { + "status": {{json .status_filter}} + } + }, + { + "$group": { + "_id": "$category", + "average_price": { "$avg": "$price" }, + "item_count": { "$sum": 1 } + } + }, + { + "$sort": { + "average_price": -1 + } + } + ] +pipelineParams: + - name: status_filter + type: string + description: The product status to filter by (e.g., "active"). ``` ## Reference | **field** | **type** | **required** | **description** | |:----------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-aggregate`. | +| type | string | true | Must be `mongodb-aggregate`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-delete-many.md b/docs/en/resources/tools/mongodb/mongodb-delete-many.md index 9e35a72124..b6ffcacc25 100644 --- a/docs/en/resources/tools/mongodb/mongodb-delete-many.md +++ b/docs/en/resources/tools/mongodb/mongodb-delete-many.md @@ -17,7 +17,7 @@ The tool returns the total count of documents that were deleted. If the filter does not match any documents (i.e., the deleted count is 0), the tool will return an error. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -29,26 +29,26 @@ Here is an example that performs a cleanup task by deleting all products from the `inventory` collection that belong to a discontinued brand. ```yaml -tools: - retire_brand_products: - kind: mongodb-delete-many - source: my-mongo-source - description: Deletes all products from a specified discontinued brand. 
- database: ecommerce - collection: inventory - filterPayload: | - { "brand_name": {{json .brand_to_delete}} } - filterParams: - - name: brand_to_delete - type: string - description: The name of the discontinued brand whose products should be deleted. +kind: tools +name: retire_brand_products +type: mongodb-delete-many +source: my-mongo-source +description: Deletes all products from a specified discontinued brand. +database: ecommerce +collection: inventory +filterPayload: | + { "brand_name": {{json .brand_to_delete}} } +filterParams: + - name: brand_to_delete + type: string + description: The name of the discontinued brand whose products should be deleted. ``` ## Reference | **field** | **type** | **required** | **description** | |:--------------|:---------|:-------------|:--------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-delete-many`. | +| type | string | true | Must be `mongodb-delete-many`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-delete-one.md b/docs/en/resources/tools/mongodb/mongodb-delete-one.md index 0cede56c13..3d9dc8dfa9 100644 --- a/docs/en/resources/tools/mongodb/mongodb-delete-one.md +++ b/docs/en/resources/tools/mongodb/mongodb-delete-one.md @@ -21,7 +21,7 @@ such as a user account or a single item from an inventory based on a unique ID. The tool returns the number of documents deleted, which will be either `1` if a document was found and deleted, or `0` if no matching document was found. 
-This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -33,26 +33,26 @@ Here is an example that deletes a specific user account from the `users` collection by matching their unique email address. This is a permanent action. ```yaml -tools: - delete_user_account: - kind: mongodb-delete-one - source: my-mongo-source - description: Permanently deletes a user account by their email address. - database: user_data - collection: users - filterPayload: | - { "email": {{json .email_address}} } - filterParams: - - name: email_address - type: string - description: The email of the user account to delete. +kind: tools +name: delete_user_account +type: mongodb-delete-one +source: my-mongo-source +description: Permanently deletes a user account by their email address. +database: user_data +collection: users +filterPayload: | + { "email": {{json .email_address}} } +filterParams: + - name: email_address + type: string + description: The email of the user account to delete. ``` ## Reference | **field** | **type** | **required** | **description** | |:--------------|:---------|:-------------|:-------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-delete-one`. | +| type | string | true | Must be `mongodb-delete-one`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-find-one.md b/docs/en/resources/tools/mongodb/mongodb-find-one.md index 395262d91a..47f72cb289 100644 --- a/docs/en/resources/tools/mongodb/mongodb-find-one.md +++ b/docs/en/resources/tools/mongodb/mongodb-find-one.md @@ -18,7 +18,7 @@ returned. 
Otherwise, the selection is not guaranteed. The tool returns a single JSON object representing the document, wrapped in a JSON array. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -31,31 +31,31 @@ and returning their profile information, while excluding sensitive fields like the password hash. ```yaml -tools: - get_user_profile: - kind: mongodb-find-one - source: my-mongo-source - description: Retrieves a user's profile by their email address. - database: user_data - collection: profiles - filterPayload: | - { "email": {{json .email}} } - filterParams: - - name: email - type: string - description: The email address of the user to find. - projectPayload: | - { - "password_hash": 0, - "login_history": 0 - } +kind: tools +name: get_user_profile +type: mongodb-find-one +source: my-mongo-source +description: Retrieves a user's profile by their email address. +database: user_data +collection: profiles +filterPayload: | + { "email": {{json .email}} } +filterParams: + - name: email + type: string + description: The email address of the user to find. +projectPayload: | + { + "password_hash": 0, + "login_history": 0 + } ``` ## Reference | **field** | **type** | **required** | **description** | |:---------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-find-one`. | +| type | string | true | Must be `mongodb-find-one`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database to query. 
| diff --git a/docs/en/resources/tools/mongodb/mongodb-find.md b/docs/en/resources/tools/mongodb/mongodb-find.md index d927c737c1..c2ce3da0a3 100644 --- a/docs/en/resources/tools/mongodb/mongodb-find.md +++ b/docs/en/resources/tools/mongodb/mongodb-find.md @@ -18,7 +18,7 @@ results (**sorting**), and restricting the number of documents returned The tool returns a JSON array of the documents found. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -29,40 +29,40 @@ live in a specific city. The results are sorted by their last name, and only their first name, last name, and email are returned. ```yaml -tools: - find_local_customers: - kind: mongodb-find - source: my-mongo-source - description: Finds customers by city, sorted by last name. - database: crm - collection: customers - limit: 10 - filterPayload: | - { "address.city": {{json .city}} } - filterParams: - - name: city - type: string - description: The city to search for customers in. - projectPayload: | - { - "first_name": 1, - "last_name": 1, - "email": 1, - "_id": 0 - } - sortPayload: | - { "last_name": {{json .sort_order}} } - sortParams: - - name: sort_order - type: integer - description: The sort order (1 for ascending, -1 for descending). +kind: tools +name: find_local_customers +type: mongodb-find +source: my-mongo-source +description: Finds customers by city, sorted by last name. +database: crm +collection: customers +limit: 10 +filterPayload: | + { "address.city": {{json .city}} } +filterParams: + - name: city + type: string + description: The city to search for customers in. +projectPayload: | + { + "first_name": 1, + "last_name": 1, + "email": 1, + "_id": 0 + } +sortPayload: | + { "last_name": {{json .sort_order}} } +sortParams: + - name: sort_order + type: integer + description: The sort order (1 for ascending, -1 for descending). 
``` ## Reference | **field** | **type** | **required** | **description** | |:---------------|:---------|:-------------|:----------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-find`. | +| type | string | true | Must be `mongodb-find`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database to query. | diff --git a/docs/en/resources/tools/mongodb/mongodb-insert-many.md b/docs/en/resources/tools/mongodb/mongodb-insert-many.md index cc6c385375..cc6e0438d7 100644 --- a/docs/en/resources/tools/mongodb/mongodb-insert-many.md +++ b/docs/en/resources/tools/mongodb/mongodb-insert-many.md @@ -19,7 +19,7 @@ be a string containing a **JSON array of document objects**. Upon successful insertion, the tool returns a JSON array containing the unique `_id` of **each** new document that was created. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -30,14 +30,14 @@ This tool is compatible with the following source kind: Here is an example configuration for a tool that logs multiple events at once. ```yaml -tools: - log_batch_events: - kind: mongodb-insert-many - source: my-mongo-source - description: Inserts a batch of event logs into the database. - database: logging - collection: events - canonical: true +kind: tools +name: log_batch_events +type: mongodb-insert-many +source: my-mongo-source +description: Inserts a batch of event logs into the database. 
+database: logging +collection: events +canonical: true ``` An LLM would call this tool by providing an array of documents as a JSON string @@ -50,7 +50,7 @@ in the `data` parameter, like this: | **field** | **type** | **required** | **description** | |:------------|:---------|:-------------|:------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-insert-many`. | +| type | string | true | Must be `mongodb-insert-many`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-insert-one.md b/docs/en/resources/tools/mongodb/mongodb-insert-one.md index 7214f2b4d6..8545cc8e10 100644 --- a/docs/en/resources/tools/mongodb/mongodb-insert-one.md +++ b/docs/en/resources/tools/mongodb/mongodb-insert-one.md @@ -17,7 +17,7 @@ This tool takes one required parameter named `data`, which must be a string containing the JSON object you want to insert. Upon successful insertion, the tool returns the unique `_id` of the newly created document. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -27,14 +27,14 @@ Here is an example configuration for a tool that adds a new user to a `users` collection. ```yaml -tools: - create_new_user: - kind: mongodb-insert-one - source: my-mongo-source - description: Creates a new user record in the database. - database: user_data - collection: users - canonical: false +kind: tools +name: create_new_user +type: mongodb-insert-one +source: my-mongo-source +description: Creates a new user record in the database. 
+database: user_data +collection: users +canonical: false ``` An LLM would call this tool by providing the document as a JSON string in the @@ -45,7 +45,7 @@ An LLM would call this tool by providing the document as a JSON string in the | **field** | **type** | **required** | **description** | |:------------|:---------|:-------------|:------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-insert-one`. | +| type | string | true | Must be `mongodb-insert-one`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-update-many.md b/docs/en/resources/tools/mongodb/mongodb-update-many.md index ef3b144364..5fdb2e650e 100644 --- a/docs/en/resources/tools/mongodb/mongodb-update-many.md +++ b/docs/en/resources/tools/mongodb/mongodb-update-many.md @@ -17,7 +17,7 @@ MongoDB collection that match a given filter. It locates the documents using a The tool returns an array of three integers: `[ModifiedCount, UpsertedCount, MatchedCount]`. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -29,37 +29,37 @@ Here's an example configuration. This tool applies a discount to all items within a specific category and also marks them as being on sale. ```yaml -tools: - apply_category_discount: - kind: mongodb-update-many - source: my-mongo-source - description: Use this tool to apply a discount to all items in a given category. - database: products - collection: inventory - filterPayload: | - { "category": {{json .category_name}} } - filterParams: - - name: category_name - type: string - description: The category of items to update. 
- updatePayload: | - { - "$mul": { "price": {{json .discount_multiplier}} }, - "$set": { "on_sale": true } - } - updateParams: - - name: discount_multiplier - type: number - description: The multiplier to apply to the price (e.g., 0.8 for a 20% discount). - canonical: false - upsert: false +kind: tools +name: apply_category_discount +type: mongodb-update-many +source: my-mongo-source +description: Use this tool to apply a discount to all items in a given category. +database: products +collection: inventory +filterPayload: | + { "category": {{json .category_name}} } +filterParams: + - name: category_name + type: string + description: The category of items to update. +updatePayload: | + { + "$mul": { "price": {{json .discount_multiplier}} }, + "$set": { "on_sale": true } + } +updateParams: + - name: discount_multiplier + type: number + description: The multiplier to apply to the price (e.g., 0.8 for a 20% discount). +canonical: false +upsert: false ``` ## Reference | **field** | **type** | **required** | **description** | |:--------------|:---------|:-------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-update-many`. | +| type | string | true | Must be `mongodb-update-many`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mongodb/mongodb-update-one.md b/docs/en/resources/tools/mongodb/mongodb-update-one.md index 063ea0b192..e0bf51e3dd 100644 --- a/docs/en/resources/tools/mongodb/mongodb-update-one.md +++ b/docs/en/resources/tools/mongodb/mongodb-update-one.md @@ -15,7 +15,7 @@ collection. 
It locates the document to be updated using a `filterPayload` and applies modifications defined in an `updatePayload`. If the filter matches multiple documents, only the first one found will be updated. -This tool is compatible with the following source kind: +This tool is compatible with the following source type: * [`mongodb`](../../sources/mongodb.md) @@ -29,37 +29,37 @@ collection where the `item` field matches a provided value. If no matching document is found, the `upsert: true` option will create a new one. ```yaml -tools: - update_inventory_item: - kind: mongodb-update-one - source: my-mongo-source - description: Use this tool to update an item's stock and status in the inventory. - database: products - collection: inventory - filterPayload: | - { "item": {{json .item_name}} } - filterParams: - - name: item_name - type: string - description: The name of the item to update. - updatePayload: | - { "$set": { "stock": {{json .new_stock}}, "status": {{json .new_status}} } } - updateParams: - - name: new_stock - type: integer - description: The new stock quantity. - - name: new_status - type: string - description: The new status of the item (e.g., "In Stock", "Backordered"). - canonical: false - upsert: true +kind: tools +name: update_inventory_item +type: mongodb-update-one +source: my-mongo-source +description: Use this tool to update an item's stock and status in the inventory. +database: products +collection: inventory +filterPayload: | + { "item": {{json .item_name}} } +filterParams: + - name: item_name + type: string + description: The name of the item to update. +updatePayload: | + { "$set": { "stock": {{json .new_stock}}, "status": {{json .new_status}} } } +updateParams: + - name: new_stock + type: integer + description: The new stock quantity. + - name: new_status + type: string + description: The new status of the item (e.g., "In Stock", "Backordered"). 
+canonical: false +upsert: true ``` ## Reference | **field** | **type** | **required** | **description** | |:--------------|:---------|:-------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be `mongodb-update-one`. | +| type | string | true | Must be `mongodb-update-one`. | | source | string | true | The name of the `mongodb` source to use. | | description | string | true | A description of the tool that is passed to the LLM. | | database | string | true | The name of the MongoDB database containing the collection. | diff --git a/docs/en/resources/tools/mssql/mssql-execute-sql.md b/docs/en/resources/tools/mssql/mssql-execute-sql.md index 4667dcf46b..23bdbf6640 100644 --- a/docs/en/resources/tools/mssql/mssql-execute-sql.md +++ b/docs/en/resources/tools/mssql/mssql-execute-sql.md @@ -26,17 +26,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: mssql-execute-sql - source: my-mssql-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: mssql-execute-sql +source: my-mssql-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "mssql-execute-sql". | +| type | string | true | Must be "mssql-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. 
| diff --git a/docs/en/resources/tools/mssql/mssql-list-tables.md b/docs/en/resources/tools/mssql/mssql-list-tables.md index 4a7fdf7a8d..d80c79cc17 100644 --- a/docs/en/resources/tools/mssql/mssql-list-tables.md +++ b/docs/en/resources/tools/mssql/mssql-list-tables.md @@ -32,17 +32,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - mssql_list_tables: - kind: mssql-list-tables - source: mssql-source - description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed. +kind: tools +name: mssql_list_tables +type: mssql-list-tables +source: mssql-source +description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "mssql-list-tables". | +| type | string | true | Must be "mssql-list-tables". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/mssql/mssql-sql.md b/docs/en/resources/tools/mssql/mssql-sql.md index e4c63184c2..c5d212f096 100644 --- a/docs/en/resources/tools/mssql/mssql-sql.md +++ b/docs/en/resources/tools/mssql/mssql-sql.md @@ -36,41 +36,41 @@ db.QueryContext(ctx, `select * from t where ID = @ID and Name = @p2;`, sql.Named > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: mssql-sql - source: my-instance - statement: | - SELECT * FROM flights - WHERE airline = @airline - AND flight_number = @flight_number - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. 
- Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: mssql-sql +source: my-instance +statement: | + SELECT * FROM flights + WHERE airline = @airline + AND flight_number = @flight_number + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -82,29 +82,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: mssql-sql - source: my-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: mssql-sql +source: my-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mssql-sql". | +| type | string | true | Must be "mssql-sql". | | source | string | true | Name of the source the T-SQL statement should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute. 
| diff --git a/docs/en/resources/tools/mysql/mysql-execute-sql.md b/docs/en/resources/tools/mysql/mysql-execute-sql.md index ff68b06516..dce8070471 100644 --- a/docs/en/resources/tools/mysql/mysql-execute-sql.md +++ b/docs/en/resources/tools/mysql/mysql-execute-sql.md @@ -26,17 +26,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: mysql-execute-sql - source: my-mysql-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: mysql-execute-sql +source: my-mysql-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mysql-execute-sql". | +| type | string | true | Must be "mysql-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mysql/mysql-get-query-plan.md b/docs/en/resources/tools/mysql/mysql-get-query-plan.md index d77b81e097..b3e259b5af 100644 --- a/docs/en/resources/tools/mysql/mysql-get-query-plan.md +++ b/docs/en/resources/tools/mysql/mysql-get-query-plan.md @@ -23,17 +23,17 @@ statement against the `source`. ## Example ```yaml -tools: - get_query_plan_tool: - kind: mysql-get-query-plan - source: my-mysql-instance - description: Use this tool to get the execution plan for a sql statement. +kind: tools +name: get_query_plan_tool +type: mysql-get-query-plan +source: my-mysql-instance +description: Use this tool to get the execution plan for a sql statement. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mysql-get-query-plan". | +| type | string | true | Must be "mysql-get-query-plan". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mysql/mysql-list-active-queries.md b/docs/en/resources/tools/mysql/mysql-list-active-queries.md index f7a2721aba..7f0ba3c0fd 100644 --- a/docs/en/resources/tools/mysql/mysql-list-active-queries.md +++ b/docs/en/resources/tools/mysql/mysql-list-active-queries.md @@ -27,11 +27,11 @@ This tool takes 2 optional input parameters: ## Example ```yaml -tools: - list_active_queries: - kind: mysql-list-active-queries - source: my-mysql-instance - description: Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema. +kind: tools +name: list_active_queries +type: mysql-list-active-queries +source: my-mysql-instance +description: Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema. 
``` The response is a json array with the following fields: @@ -57,6 +57,6 @@ The response is a json array with the following fields: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "mysql-list-active-queries". | +| type | string | true | Must be "mysql-list-active-queries". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mysql/mysql-list-table-fragmentation.md b/docs/en/resources/tools/mysql/mysql-list-table-fragmentation.md index 1af1308a71..3e0ce0c5a9 100644 --- a/docs/en/resources/tools/mysql/mysql-list-table-fragmentation.md +++ b/docs/en/resources/tools/mysql/mysql-list-table-fragmentation.md @@ -34,11 +34,11 @@ This tool takes 4 optional input parameters: ## Example ```yaml -tools: - list_table_fragmentation: - kind: mysql-list-table-fragmentation - source: my-mysql-instance - description: List table fragmentation in MySQL, by calculating the size of the data and index files and free space allocated to each table. The query calculates fragmentation percentage which represents the proportion of free space relative to the total data and index size. Storage can be reclaimed for tables with high fragmentation using OPTIMIZE TABLE. +kind: tools +name: list_table_fragmentation +type: mysql-list-table-fragmentation +source: my-mysql-instance +description: List table fragmentation in MySQL, by calculating the size of the data and index files and free space allocated to each table. The query calculates fragmentation percentage which represents the proportion of free space relative to the total data and index size. Storage can be reclaimed for tables with high fragmentation using OPTIMIZE TABLE. 
``` The response is a json array with the following fields: @@ -58,6 +58,6 @@ The response is a json array with the following fields: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "mysql-list-table-fragmentation". | +| type | string | true | Must be "mysql-list-table-fragmentation". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mysql/mysql-list-tables-missing-unique-indexes.md b/docs/en/resources/tools/mysql/mysql-list-tables-missing-unique-indexes.md index 087860141a..7c1c08296a 100644 --- a/docs/en/resources/tools/mysql/mysql-list-tables-missing-unique-indexes.md +++ b/docs/en/resources/tools/mysql/mysql-list-tables-missing-unique-indexes.md @@ -27,11 +27,11 @@ parameters: ## Example ```yaml -tools: - list_tables_missing_unique_indexes: - kind: mysql-list-tables-missing-unique-indexes - source: my-mysql-instance - description: Find tables that do not have primary or unique key constraint. A primary key or unique key is the only mechanism that guaranttes a row is unique. Without them, the database-level protection against data integrity issues will be missing. +kind: tools +name: list_tables_missing_unique_indexes +type: mysql-list-tables-missing-unique-indexes +source: my-mysql-instance +description: Find tables that do not have primary or unique key constraint. A primary key or unique key is the only mechanism that guarantees a row is unique. Without them, the database-level protection against data integrity issues will be missing.
``` The response is a json array with the following fields: @@ -47,6 +47,6 @@ The response is a json array with the following fields: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "mysql-list-active-queries". | +| type | string | true | Must be "mysql-list-tables-missing-unique-indexes". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/mysql/mysql-list-tables.md b/docs/en/resources/tools/mysql/mysql-list-tables.md index f07eef7c68..62da629c3b 100644 --- a/docs/en/resources/tools/mysql/mysql-list-tables.md +++ b/docs/en/resources/tools/mysql/mysql-list-tables.md @@ -33,17 +33,17 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - mysql_list_tables: - kind: mysql-list-tables - source: mysql-source - description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed. +kind: tools +name: mysql_list_tables +type: mysql-list-tables +source: mysql-source +description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "mysql-list-tables". | +| type | string | true | Must be "mysql-list-tables". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the agent.
| diff --git a/docs/en/resources/tools/mysql/mysql-sql.md b/docs/en/resources/tools/mysql/mysql-sql.md index 56850ba0b2..9d534a6c03 100644 --- a/docs/en/resources/tools/mysql/mysql-sql.md +++ b/docs/en/resources/tools/mysql/mysql-sql.md @@ -30,41 +30,41 @@ and expects parameters in the SQL query to be in the form of placeholders `?`. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: mysql-sql - source: my-mysql-instance - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: mysql-sql +source: my-mysql-instance +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. 
+ A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -76,29 +76,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: mysql-sql - source: my-mysql-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: mysql-sql +source: my-mysql-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "mysql-sql". | +| type | string | true | Must be "mysql-sql". | | source | string | true | Name of the source the SQL should execute on. 
| | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/neo4j/neo4j-cypher.md b/docs/en/resources/tools/neo4j/neo4j-cypher.md index 1086f13693..f058c4eca6 100644 --- a/docs/en/resources/tools/neo4j/neo4j-cypher.md +++ b/docs/en/resources/tools/neo4j/neo4j-cypher.md @@ -31,46 +31,46 @@ their name: e.g. `$id`. ## Example ```yaml -tools: - search_movies_by_actor: - kind: neo4j-cypher - source: my-neo4j-movies-instance - statement: | - MATCH (m:Movie)<-[:ACTED_IN]-(p:Person) - WHERE p.name = $name AND m.year > $year - RETURN m.title, m.year - LIMIT 10 - description: | - Use this tool to get a list of movies for a specific actor and a given minimum release year. - Takes a full actor name, e.g. "Tom Hanks" and a year e.g 1993 and returns a list of movie titles and release years. - Do NOT use this tool with a movie title. Do NOT guess an actor name, Do NOT guess a year. - A actor name is a fully qualified name with first and last name separated by a space. - For example, if given "Hanks, Tom" the actor name is "Tom Hanks". - If the tool returns more than one option choose the most recent movies. - Example: - {{ - "name": "Meg Ryan", - "year": 1993 - }} - Example: - {{ - "name": "Clint Eastwood", - "year": 2000 - }} - parameters: - - name: name - type: string - description: Full actor name, "firstname lastname" - - name: year - type: integer - description: 4 digit number starting in 1900 up to the current year +kind: tools +name: search_movies_by_actor +type: neo4j-cypher +source: my-neo4j-movies-instance +statement: | + MATCH (m:Movie)<-[:ACTED_IN]-(p:Person) + WHERE p.name = $name AND m.year > $year + RETURN m.title, m.year + LIMIT 10 +description: | + Use this tool to get a list of movies for a specific actor and a given minimum release year. + Takes a full actor name, e.g. 
"Tom Hanks" and a year e.g 1993 and returns a list of movie titles and release years. + Do NOT use this tool with a movie title. Do NOT guess an actor name, Do NOT guess a year. + A actor name is a fully qualified name with first and last name separated by a space. + For example, if given "Hanks, Tom" the actor name is "Tom Hanks". + If the tool returns more than one option choose the most recent movies. + Example: + {{ + "name": "Meg Ryan", + "year": 1993 + }} + Example: + {{ + "name": "Clint Eastwood", + "year": 2000 + }} +parameters: + - name: name + type: string + description: Full actor name, "firstname lastname" + - name: year + type: integer + description: 4 digit number starting in 1900 up to the current year ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:---------------------------------------:|:------------:|----------------------------------------------------------------------------------------------| -| kind | string | true | Must be "neo4j-cypher". | +| type | string | true | Must be "neo4j-cypher". | | source | string | true | Name of the source the Cypher query should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | Cypher statement to execute | diff --git a/docs/en/resources/tools/neo4j/neo4j-execute-cypher.md b/docs/en/resources/tools/neo4j/neo4j-execute-cypher.md index 642f4234cf..e3828693d8 100644 --- a/docs/en/resources/tools/neo4j/neo4j-execute-cypher.md +++ b/docs/en/resources/tools/neo4j/neo4j-execute-cypher.md @@ -37,26 +37,26 @@ parameter to validate a query without executing it. ## Example ```yaml -tools: - query_neo4j: - kind: neo4j-execute-cypher - source: my-neo4j-prod-db - readOnly: true - description: | - Use this tool to execute a Cypher query against the production database. - Only read-only queries are allowed. - Takes a single 'cypher' parameter containing the full query string. 
- Example: - {{ - "cypher": "MATCH (m:Movie {title: 'The Matrix'}) RETURN m.released" - }} +kind: tools +name: query_neo4j +type: neo4j-execute-cypher +source: my-neo4j-prod-db +readOnly: true +description: | + Use this tool to execute a Cypher query against the production database. + Only read-only queries are allowed. + Takes a single 'cypher' parameter containing the full query string. + Example: + {{ + "cypher": "MATCH (m:Movie {title: 'The Matrix'}) RETURN m.released" + }} ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "neo4j-cypher". | +| type | string | true | Must be "neo4j-execute-cypher". | | source | string | true | Name of the source the Cypher query should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | readOnly | boolean | false | If set to `true`, the tool will reject any write operations in the Cypher query. Default is `false`. | diff --git a/docs/en/resources/tools/neo4j/neo4j-schema.md b/docs/en/resources/tools/neo4j/neo4j-schema.md index aee380ebc5..31c3bcb81a 100644 --- a/docs/en/resources/tools/neo4j/neo4j-schema.md +++ b/docs/en/resources/tools/neo4j/neo4j-schema.md @@ -28,24 +28,24 @@ tool is compatible with a `neo4j` source and takes no parameters. ## Example ```yaml -tools: - get_movie_db_schema: - kind: neo4j-schema - source: my-neo4j-movies-instance - description: | - Use this tool to get the full schema of the movie database. - This provides information on all available node labels (like Movie, Person), - relationships (like ACTED_IN), and the properties on each. - This tool takes no parameters.
- # Optional configuration to cache the schema for 2 hours - cacheExpireMinutes: 120 +kind: tools +name: get_movie_db_schema +type: neo4j-schema +source: my-neo4j-movies-instance +description: | + Use this tool to get the full schema of the movie database. + This provides information on all available node labels (like Movie, Person), + relationships (like ACTED_IN), and the properties on each. + This tool takes no parameters. +# Optional configuration to cache the schema for 2 hours +cacheExpireMinutes: 120 ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------:|:------------:|---------------------------------------------------------| -| kind | string | true | Must be `neo4j-schema`. | +| type | string | true | Must be `neo4j-schema`. | | source | string | true | Name of the source the schema should be extracted from. | | description | string | true | Description of the tool that is passed to the LLM. | | cacheExpireMinutes | integer | false | Cache expiration time in minutes. Defaults to 60. | diff --git a/docs/en/resources/tools/oceanbase/oceanbase-execute-sql.md b/docs/en/resources/tools/oceanbase/oceanbase-execute-sql.md index 36432b2e8b..fe43ff54b9 100644 --- a/docs/en/resources/tools/oceanbase/oceanbase-execute-sql.md +++ b/docs/en/resources/tools/oceanbase/oceanbase-execute-sql.md @@ -24,17 +24,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: oceanbase-execute-sql - source: my-oceanbase-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: oceanbase-execute-sql +source: my-oceanbase-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "oceanbase-execute-sql". 
| +| type | string | true | Must be "oceanbase-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/oceanbase/oceanbase-sql.md b/docs/en/resources/tools/oceanbase/oceanbase-sql.md index ee33d3cd7c..aafdadc3d6 100644 --- a/docs/en/resources/tools/oceanbase/oceanbase-sql.md +++ b/docs/en/resources/tools/oceanbase/oceanbase-sql.md @@ -29,31 +29,31 @@ form of placeholders `?`. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: oceanbase-sql - source: my-oceanbase-instance - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: oceanbase-sql +source: my-oceanbase-instance +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -64,64 +64,64 @@ tools: > recommended for performance and safety reasons. ```yaml -tools: - list_table: - kind: oceanbase-sql - source: my-oceanbase-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: oceanbase-sql +source: my-oceanbase-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ### Example with Array Parameters ```yaml -tools: - search_flights_by_ids: - kind: oceanbase-sql - source: my-oceanbase-instance - statement: | - SELECT * FROM flights - WHERE id IN (?) - AND status IN (?) - description: | - Use this tool to get information for multiple flights by their IDs and statuses. - Example: - {{ - "flight_ids": [1, 2, 3], - "statuses": ["active", "scheduled"] - }} - parameters: - - name: flight_ids - type: array - description: List of flight IDs to search for - items: - name: flight_id - type: integer - description: Individual flight ID - - name: statuses - type: array - description: List of flight statuses to filter by - items: - name: status - type: string - description: Individual flight status +kind: tools +name: search_flights_by_ids +type: oceanbase-sql +source: my-oceanbase-instance +statement: | + SELECT * FROM flights + WHERE id IN (?) + AND status IN (?) 
+description: | + Use this tool to get information for multiple flights by their IDs and statuses. + Example: + {{ + "flight_ids": [1, 2, 3], + "statuses": ["active", "scheduled"] + }} +parameters: + - name: flight_ids + type: array + description: List of flight IDs to search for + items: + name: flight_id + type: integer + description: Individual flight ID + - name: statuses + type: array + description: List of flight statuses to filter by + items: + name: status + type: string + description: Individual flight status ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "oceanbase-sql". | +| type | string | true | Must be "oceanbase-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/oracle/oracle-execute-sql.md b/docs/en/resources/tools/oracle/oracle-execute-sql.md index c66cce2b8c..cbfd79037b 100644 --- a/docs/en/resources/tools/oracle/oracle-execute-sql.md +++ b/docs/en/resources/tools/oracle/oracle-execute-sql.md @@ -24,8 +24,9 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: oracle-execute-sql - source: my-oracle-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: oracle-execute-sql +source: my-oracle-instance +description: Use this tool to execute sql statement. 
+``` diff --git a/docs/en/resources/tools/oracle/oracle-sql.md b/docs/en/resources/tools/oracle/oracle-sql.md index 045031f60c..49d665be36 100644 --- a/docs/en/resources/tools/oracle/oracle-sql.md +++ b/docs/en/resources/tools/oracle/oracle-sql.md @@ -29,29 +29,29 @@ to be in the native Oracle format (e.g., `:1`, `:2`). > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: oracle-sql - source: my-oracle-instance - statement: | - SELECT * FROM flights - WHERE airline = :1 - AND flight_number = :2 - FETCH FIRST 10 ROWS ONLY - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: oracle-sql +source: my-oracle-instance +statement: | + SELECT * FROM flights + WHERE airline = :1 + AND flight_number = :2 + FETCH FIRST 10 ROWS ONLY +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` diff --git a/docs/en/resources/tools/postgres/postgres-database-overview.md b/docs/en/resources/tools/postgres/postgres-database-overview.md index 485a8625fb..1dcb23dc45 100644 --- a/docs/en/resources/tools/postgres/postgres-database-overview.md +++ b/docs/en/resources/tools/postgres/postgres-database-overview.md @@ -23,12 +23,12 @@ This tool does not take any input parameters. ## Example ```yaml -tools: - database_overview: - kind: postgres-database-overview - source: cloudsql-pg-source - description: | - fetches the current state of the PostgreSQL server. It returns the postgres version, whether it's a replica, uptime duration, maximum connection limit, number of current connections, number of active connections and the percentage of connections in use. +kind: tools +name: database_overview +type: postgres-database-overview +source: cloudsql-pg-source +description: | + fetches the current state of the PostgreSQL server. It returns the postgres version, whether it's a replica, uptime duration, maximum connection limit, number of current connections, number of active connections and the percentage of connections in use. ``` The response is a JSON object with the following elements: @@ -49,6 +49,6 @@ The response is a JSON object with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-database-overview". | +| type | string | true | Must be "postgres-database-overview". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. 
| diff --git a/docs/en/resources/tools/postgres/postgres-execute-sql.md b/docs/en/resources/tools/postgres/postgres-execute-sql.md index 6670efcb0b..07a3847405 100644 --- a/docs/en/resources/tools/postgres/postgres-execute-sql.md +++ b/docs/en/resources/tools/postgres/postgres-execute-sql.md @@ -27,17 +27,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: postgres-execute-sql - source: my-pg-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: postgres-execute-sql +source: my-pg-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "postgres-execute-sql". | +| type | string | true | Must be "postgres-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/postgres/postgres-get-column-cardinality.md b/docs/en/resources/tools/postgres/postgres-get-column-cardinality.md index 2563ead4f1..4cd778d19d 100644 --- a/docs/en/resources/tools/postgres/postgres-get-column-cardinality.md +++ b/docs/en/resources/tools/postgres/postgres-get-column-cardinality.md @@ -30,11 +30,11 @@ the following input parameters: ## Example ```yaml -tools: - get_column_cardinality: - kind: postgres-get-column-cardinality - source: postgres-source - description: Estimates the number of unique values (cardinality) quickly for one or all columns in a specific PostgreSQL table by using the database's internal statistics, returning the results in descending order of estimated cardinality. 
Please run ANALYZE on the table before using this tool to get accurate results. The tool returns the column_name and the estimated_cardinality. If the column_name is not provided, the tool returns all columns along with their estimated cardinality. +kind: tools +name: get_column_cardinality +type: postgres-get-column-cardinality +source: postgres-source +description: Estimates the number of unique values (cardinality) quickly for one or all columns in a specific PostgreSQL table by using the database's internal statistics, returning the results in descending order of estimated cardinality. Please run ANALYZE on the table before using this tool to get accurate results. The tool returns the column_name and the estimated_cardinality. If the column_name is not provided, the tool returns all columns along with their estimated cardinality. ``` The response is a json array with the following elements: @@ -58,6 +58,6 @@ to estimate cardinality. | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-get-column-cardinality". | +| type | string | true | Must be "postgres-get-column-cardinality". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/postgres/postgres-list-active-queries.md b/docs/en/resources/tools/postgres/postgres-list-active-queries.md index 2184d96f70..2c533b15ba 100644 --- a/docs/en/resources/tools/postgres/postgres-list-active-queries.md +++ b/docs/en/resources/tools/postgres/postgres-list-active-queries.md @@ -33,11 +33,11 @@ active queries. 
The tool takes the following input parameters: ## Example ```yaml -tools: - list_active_queries: - kind: postgres-list-active-queries - source: postgres-source - description: List the top N (default 50) currently running queries (state='active') from pg_stat_activity, ordered by longest-running first. Returns pid, user, database, application_name, client_addr, state, wait_event_type/wait_event, backend/xact/query start times, computed query_duration, and the SQL text. +kind: tools +name: list_active_queries +type: postgres-list-active-queries +source: postgres-source +description: List the top N (default 50) currently running queries (state='active') from pg_stat_activity, ordered by longest-running first. Returns pid, user, database, application_name, client_addr, state, wait_event_type/wait_event, backend/xact/query start times, computed query_duration, and the SQL text. ``` The response is a json array with the following elements: @@ -64,6 +64,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "postgres-list-active-queries". | +| type | string | true | Must be "postgres-list-active-queries". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/postgres/postgres-list-available-extensions.md b/docs/en/resources/tools/postgres/postgres-list-available-extensions.md index dc823c78a5..cc2845baa3 100644 --- a/docs/en/resources/tools/postgres/postgres-list-available-extensions.md +++ b/docs/en/resources/tools/postgres/postgres-list-available-extensions.md @@ -26,11 +26,11 @@ not support any input parameter. 
## Example ```yaml -tools: - list_available_extensions: - kind: postgres-list-available-extensions - source: postgres-source - description: Discover all PostgreSQL extensions available for installation on this server, returning name, default_version, and description. +kind: tools +name: list_available_extensions +type: postgres-list-available-extensions +source: postgres-source +description: Discover all PostgreSQL extensions available for installation on this server, returning name, default_version, and description. ``` ## Reference diff --git a/docs/en/resources/tools/postgres/postgres-list-database-stats.md b/docs/en/resources/tools/postgres/postgres-list-database-stats.md index 01537bcfa7..7d9e3f57aa 100644 --- a/docs/en/resources/tools/postgres/postgres-list-database-stats.md +++ b/docs/en/resources/tools/postgres/postgres-list-database-stats.md @@ -30,31 +30,31 @@ takes the following input parameters: ## Example ```yaml -tools: - list_database_stats: - kind: postgres-list-database-stats - source: postgres-source - description: | - Lists the key performance and activity statistics for each PostgreSQL - database in the instance, offering insights into cache efficiency, - transaction throughput row-level activity, temporary file usage, and - contention. 
It returns: the database name, whether the database is - connectable, database owner, default tablespace name, the percentage of - data blocks found in the buffer cache rather than being read from disk - (a higher value indicates better cache performance), the total number of - disk blocks read from disk, the total number of times disk blocks were - found already in the cache; the total number of committed transactions, - the total number of rolled back transactions, the percentage of rolled - back transactions compared to the total number of completed - transactions, the total number of rows returned by queries, the total - number of live rows fetched by scans, the total number of rows inserted, - the total number of rows updated, the total number of rows deleted, the - number of temporary files created by queries, the total size of - temporary files used by queries in bytes, the number of query - cancellations due to conflicts with recovery, the number of deadlocks - detected, the current number of active backend connections, the - timestamp when the database statistics were last reset, and the total - database size in bytes. +kind: tools +name: list_database_stats +type: postgres-list-database-stats +source: postgres-source +description: | + Lists the key performance and activity statistics for each PostgreSQL + database in the instance, offering insights into cache efficiency, + transaction throughput row-level activity, temporary file usage, and + contention. 
It returns: the database name, whether the database is + connectable, database owner, default tablespace name, the percentage of + data blocks found in the buffer cache rather than being read from disk + (a higher value indicates better cache performance), the total number of + disk blocks read from disk, the total number of times disk blocks were + found already in the cache; the total number of committed transactions, + the total number of rolled back transactions, the percentage of rolled + back transactions compared to the total number of completed + transactions, the total number of rows returned by queries, the total + number of live rows fetched by scans, the total number of rows inserted, + the total number of rows updated, the total number of rows deleted, the + number of temporary files created by queries, the total size of + temporary files used by queries in bytes, the number of query + cancellations due to conflicts with recovery, the number of deadlocks + detected, the current number of active backend connections, the + timestamp when the database statistics were last reset, and the total + database size in bytes. ``` The response is a json array with the following elements: @@ -90,6 +90,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-database-stats". | +| type | string | true | Must be "postgres-list-database-stats". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. 
| diff --git a/docs/en/resources/tools/postgres/postgres-list-indexes.md b/docs/en/resources/tools/postgres/postgres-list-indexes.md index f528ae97ec..590b09bb33 100644 --- a/docs/en/resources/tools/postgres/postgres-list-indexes.md +++ b/docs/en/resources/tools/postgres/postgres-list-indexes.md @@ -30,18 +30,18 @@ takes the following input parameters: ## Example ```yaml -tools: - list_indexes: - kind: postgres-list-indexes - source: postgres-source - description: | - Lists available user indexes in the database, excluding system schemas (pg_catalog, - information_schema). For each index, the following properties are returned: - schema name, table name, index name, index type (access method), a boolean - indicating if it's a unique index, a boolean indicating if it's for a primary key, - the index definition, index size in bytes, the number of index scans, the number of - index tuples read, the number of table tuples fetched via index scans, and a boolean - indicating if the index has been used at least once. +kind: tools +name: list_indexes +type: postgres-list-indexes +source: postgres-source +description: | + Lists available user indexes in the database, excluding system schemas (pg_catalog, + information_schema). For each index, the following properties are returned: + schema name, table name, index name, index type (access method), a boolean + indicating if it's a unique index, a boolean indicating if it's for a primary key, + the index definition, index size in bytes, the number of index scans, the number of + index tuples read, the number of table tuples fetched via index scans, and a boolean + indicating if the index has been used at least once. 
``` The response is a json array with the following elements: @@ -67,6 +67,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-indexes". | +| type | string | true | Must be "postgres-list-indexes". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-installed-extensions.md b/docs/en/resources/tools/postgres/postgres-list-installed-extensions.md index d3ff4eb26f..afda5031bc 100644 --- a/docs/en/resources/tools/postgres/postgres-list-installed-extensions.md +++ b/docs/en/resources/tools/postgres/postgres-list-installed-extensions.md @@ -26,17 +26,17 @@ support any input parameter. ## Example ```yaml -tools: - list_installed_extensions: - kind: postgres-list-installed-extensions - source: postgres-source - description: List all installed PostgreSQL extensions with their name, version, schema, owner, and description. +kind: tools +name: list_installed_extensions +type: postgres-list-installed-extensions +source: postgres-source +description: List all installed PostgreSQL extensions with their name, version, schema, owner, and description. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "postgres-list-active-queries". | +| type | string | true | Must be "postgres-list-installed-extensions". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM.
| diff --git a/docs/en/resources/tools/postgres/postgres-list-locks.md b/docs/en/resources/tools/postgres/postgres-list-locks.md index a0d60e93d9..4b7b8c6bb2 100644 --- a/docs/en/resources/tools/postgres/postgres-list-locks.md +++ b/docs/en/resources/tools/postgres/postgres-list-locks.md @@ -49,11 +49,11 @@ GROUP BY ## Example ```yaml -tools: - list_locks: - kind: postgres-list-locks - source: postgres-source - description: "Lists active locks with associated process and query information." +kind: tools +name: list_locks +type: postgres-list-locks +source: postgres-source +description: "Lists active locks with associated process and query information." ``` Example response element (aggregated per process): diff --git a/docs/en/resources/tools/postgres/postgres-list-pg-settings.md b/docs/en/resources/tools/postgres/postgres-list-pg-settings.md index 23d5e28e92..895dbebf21 100644 --- a/docs/en/resources/tools/postgres/postgres-list-pg-settings.md +++ b/docs/en/resources/tools/postgres/postgres-list-pg-settings.md @@ -25,16 +25,16 @@ takes the following input parameters: ## Example ```yaml -tools: - list_indexes: - kind: postgres-list-pg-settings - source: postgres-source - description: | - Lists configuration parameters for the postgres server ordered lexicographically, - with a default limit of 50 rows. It returns the parameter name, its current setting, - unit of measurement, a short description, the source of the current setting (e.g., - default, configuration file, session), and whether a restart is required when the - parameter value is changed." +kind: tools +name: list_indexes +type: postgres-list-pg-settings +source: postgres-source +description: | + Lists configuration parameters for the postgres server ordered lexicographically, + with a default limit of 50 rows. 
It returns the parameter name, its current setting, + unit of measurement, a short description, the source of the current setting (e.g., + default, configuration file, session), and whether a restart is required when the + parameter value is changed." ``` The response is a json array with the following elements: @@ -54,6 +54,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-pg-settings". | +| type | string | true | Must be "postgres-list-pg-settings". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-publication-tables.md b/docs/en/resources/tools/postgres/postgres-list-publication-tables.md index a437d11783..981402aa15 100644 --- a/docs/en/resources/tools/postgres/postgres-list-publication-tables.md +++ b/docs/en/resources/tools/postgres/postgres-list-publication-tables.md @@ -28,18 +28,18 @@ of tables) as part of the logical replication feature. The tool takes the follow ## Example ```yaml -tools: - list_indexes: - kind: postgres-list-publication-tables - source: postgres-source - description: | - Lists all tables that are explicitly part of a publication in the database. - Tables that are part of a publication via 'FOR ALL TABLES' are not included, - unless they are also explicitly added to the publication. - Returns the publication name, schema name, and table name, along with - definition details indicating if it publishes all tables, whether it - replicates inserts, updates, deletes, or truncates, and the publication - owner. 
+kind: tools +name: list_indexes +type: postgres-list-publication-tables +source: postgres-source +description: | + Lists all tables that are explicitly part of a publication in the database. + Tables that are part of a publication via 'FOR ALL TABLES' are not included, + unless they are also explicitly added to the publication. + Returns the publication name, schema name, and table name, along with + definition details indicating if it publishes all tables, whether it + replicates inserts, updates, deletes, or truncates, and the publication + owner. ``` The response is a JSON array with the following elements: @@ -61,6 +61,6 @@ The response is a JSON array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-publication-tables". | +| type | string | true | Must be "postgres-list-publication-tables". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-query-stats.md b/docs/en/resources/tools/postgres/postgres-list-query-stats.md index d6bfc9cb57..b5120cf357 100644 --- a/docs/en/resources/tools/postgres/postgres-list-query-stats.md +++ b/docs/en/resources/tools/postgres/postgres-list-query-stats.md @@ -29,11 +29,11 @@ total execution time in descending order. The tool takes the following input par ## Example ```yaml -tools: - list_query_stats: - kind: postgres-list-query-stats - source: postgres-source - description: List query statistics from pg_stat_statements, showing performance metrics for queries including execution counts, timing information, and resource usage. Results are ordered by total execution time descending. 
+kind: tools +name: list_query_stats +type: postgres-list-query-stats +source: postgres-source +description: List query statistics from pg_stat_statements, showing performance metrics for queries including execution counts, timing information, and resource usage. Results are ordered by total execution time descending. ``` The response is a json array with the following elements: @@ -66,6 +66,6 @@ identifying slow queries and understanding query performance patterns. | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-query-stats". | +| type | string | true | Must be "postgres-list-query-stats". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/postgres/postgres-list-roles.md b/docs/en/resources/tools/postgres/postgres-list-roles.md index d3de6abdfb..abfaefc84e 100644 --- a/docs/en/resources/tools/postgres/postgres-list-roles.md +++ b/docs/en/resources/tools/postgres/postgres-list-roles.md @@ -26,19 +26,19 @@ takes the following input parameters: ## Example ```yaml -tools: - list_indexes: - kind: postgres-list-roles - source: postgres-source - description: | - Lists all the user-created roles in the instance . It returns the role name, - Object ID, the maximum number of concurrent connections the role can make, - along with boolean indicators for: superuser status, privilege inheritance - from member roles, ability to create roles, ability to create databases, - ability to log in, replication privilege, and the ability to bypass - row-level security, the password expiration timestamp, a list of direct - members belonging to this role, and a list of other roles/groups that this - role is a member of. 
+kind: tools +name: list_roles +type: postgres-list-roles +source: postgres-source +description: | + Lists all the user-created roles in the instance. It returns the role name, + Object ID, the maximum number of concurrent connections the role can make, + along with boolean indicators for: superuser status, privilege inheritance + from member roles, ability to create roles, ability to create databases, + ability to log in, replication privilege, and the ability to bypass + row-level security, the password expiration timestamp, a list of direct + members belonging to this role, and a list of other roles/groups that this + role is a member of. ``` The response is a json array with the following elements: @@ -65,6 +65,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-roles". | +| type | string | true | Must be "postgres-list-roles". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-schemas.md b/docs/en/resources/tools/postgres/postgres-list-schemas.md index 6c0eb9e82a..5933085fe1 100644 --- a/docs/en/resources/tools/postgres/postgres-list-schemas.md +++ b/docs/en/resources/tools/postgres/postgres-list-schemas.md @@ -28,11 +28,11 @@ tool takes the following input parameters: ## Example ```yaml -tools: - list_schemas: - kind: postgres-list-schemas - source: postgres-source - description: "Lists all schemas in the database ordered by schema name and excluding system and temporary schemas. It returns the schema name, schema owner, grants, number of functions, number of tables and number of views within each schema."
+kind: tools +name: list_schemas +type: postgres-list-schemas +source: postgres-source +description: "Lists all schemas in the database ordered by schema name and excluding system and temporary schemas. It returns the schema name, schema owner, grants, number of functions, number of tables and number of views within each schema." ``` The response is a json array with the following elements: @@ -52,6 +52,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "postgres-list-schemas". | +| type | string | true | Must be "postgres-list-schemas". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/postgres/postgres-list-sequences.md b/docs/en/resources/tools/postgres/postgres-list-sequences.md index e51915525a..3f5efe853b 100644 --- a/docs/en/resources/tools/postgres/postgres-list-sequences.md +++ b/docs/en/resources/tools/postgres/postgres-list-sequences.md @@ -29,16 +29,16 @@ The tool takes the following input parameters: ## Example ```yaml -tools: - list_indexes: - kind: postgres-list-sequences - source: postgres-source - description: | - Lists all the sequences in the database ordered by sequence name. - Returns sequence name, schema name, sequence owner, data type of the - sequence, starting value, minimum value, maximum value of the sequence, - the value by which the sequence is incremented, and the last value - generated by generated by the sequence in the current session. +kind: tools +name: list_sequences +type: postgres-list-sequences +source: postgres-source +description: | + Lists all the sequences in the database ordered by sequence name.
+ Returns sequence name, schema name, sequence owner, data type of the + sequence, starting value, minimum value, maximum value of the sequence, + the value by which the sequence is incremented, and the last value + generated by the sequence in the current session. ``` The response is a json array with the following elements: @@ -61,6 +61,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-sequences". | +| type | string | true | Must be "postgres-list-sequences". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-stored-procedure.md b/docs/en/resources/tools/postgres/postgres-list-stored-procedure.md index ec03365e55..cd6a86aa1f 100644 --- a/docs/en/resources/tools/postgres/postgres-list-stored-procedure.md +++ b/docs/en/resources/tools/postgres/postgres-list-stored-procedure.md @@ -31,11 +31,11 @@ The tool returns a JSON array where each element represents a stored procedure w ## Example ```yaml -tools: - list_stored_procedure: - kind: postgres-list-stored-procedure - source: postgres-source - description: "Retrieves stored procedure metadata including definitions and owners." +kind: tools +name: list_stored_procedure +type: postgres-list-stored-procedure +source: postgres-source +description: "Retrieves stored procedure metadata including definitions and owners."
``` ### Example Requests diff --git a/docs/en/resources/tools/postgres/postgres-list-table-stats.md b/docs/en/resources/tools/postgres/postgres-list-table-stats.md index 666a126aca..cecc8b81d8 100644 --- a/docs/en/resources/tools/postgres/postgres-list-table-stats.md +++ b/docs/en/resources/tools/postgres/postgres-list-table-stats.md @@ -23,11 +23,11 @@ The tool returns a JSON array where each element represents statistics for a tab ## Example ```yaml -tools: - list_table_stats: - kind: postgres-list-table-stats - source: postgres-source - description: "Lists table statistics including size, scans, and bloat metrics." +kind: tools +name: list_table_stats +type: postgres-list-table-stats +source: postgres-source +description: "Lists table statistics including size, scans, and bloat metrics." ``` ### Example Requests diff --git a/docs/en/resources/tools/postgres/postgres-list-tables.md b/docs/en/resources/tools/postgres/postgres-list-tables.md index f40076bd49..04f56cbfaf 100644 --- a/docs/en/resources/tools/postgres/postgres-list-tables.md +++ b/docs/en/resources/tools/postgres/postgres-list-tables.md @@ -31,18 +31,18 @@ constraints, indexes, triggers, owner, comment) as JSON for user-created tables ## Example ```yaml -tools: - postgres_list_tables: - kind: postgres-list-tables - source: postgres-source - description: Use this tool to retrieve schema information for all or - specified tables. Output format can be simple (only table names) or detailed. +kind: tools +name: postgres_list_tables +type: postgres-list-tables +source: postgres-source +description: Use this tool to retrieve schema information for all or + specified tables. Output format can be simple (only table names) or detailed. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-tables". 
| +| type | string | true | Must be "postgres-list-tables". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-tablespaces.md b/docs/en/resources/tools/postgres/postgres-list-tablespaces.md index bf63f61b8f..5e78bf5cbe 100644 --- a/docs/en/resources/tools/postgres/postgres-list-tablespaces.md +++ b/docs/en/resources/tools/postgres/postgres-list-tablespaces.md @@ -24,15 +24,15 @@ The `postgres-list-tablespaces` tool lists available tablespaces in the database ## Example ```yaml -tools: - list_tablespaces: - kind: postgres-list-tablespaces - source: postgres-source - description: | - Lists all tablespaces in the database. Returns the tablespace name, - owner name, size in bytes(if the current user has CREATE privileges on - the tablespace, otherwise NULL), internal object ID, the access control - list regarding permissions, and any specific tablespace options. +kind: tools +name: list_tablespaces +type: postgres-list-tablespaces +source: postgres-source +description: | + Lists all tablespaces in the database. Returns the tablespace name, + owner name, size in bytes(if the current user has CREATE privileges on + the tablespace, otherwise NULL), internal object ID, the access control + list regarding permissions, and any specific tablespace options. ``` The response is a json array with the following elements: @@ -51,6 +51,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:-------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-tablespaces". | +| type | string | true | Must be "postgres-list-tablespaces". | | source | string | true | Name of the source the SQL should execute on. 
| | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-list-triggers.md b/docs/en/resources/tools/postgres/postgres-list-triggers.md index b270894001..5c93e6fc50 100644 --- a/docs/en/resources/tools/postgres/postgres-list-triggers.md +++ b/docs/en/resources/tools/postgres/postgres-list-triggers.md @@ -31,13 +31,12 @@ tool takes the following input parameters: ## Example ```yaml -```yaml -tools: - list_triggers: - kind: postgres-list-triggers - source: postgres-source - description: | - Lists all non-internal triggers in a database. Returns trigger name, schema name, table name, wether its enabled or disabled, timing (e.g BEFORE/AFTER of the event), the events that cause the trigger to fire such as INSERT, UPDATE, or DELETE, whether the trigger activates per ROW or per STATEMENT, the handler function executed by the trigger and full definition. +kind: tools +name: list_triggers +type: postgres-list-triggers +source: postgres-source +description: | + Lists all non-internal triggers in a database. Returns trigger name, schema name, table name, whether it is enabled or disabled, timing (e.g. BEFORE/AFTER of the event), the events that cause the trigger to fire such as INSERT, UPDATE, or DELETE, whether the trigger activates per ROW or per STATEMENT, the handler function executed by the trigger and full definition. ``` The response is a json array with the following elements: @@ -60,6 +59,6 @@ The response is a json array with the following elements: | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-triggers". | +| type | string | true | Must be "postgres-list-triggers". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent.
| diff --git a/docs/en/resources/tools/postgres/postgres-list-views.md b/docs/en/resources/tools/postgres/postgres-list-views.md index b6f1687f33..a99b092276 100644 --- a/docs/en/resources/tools/postgres/postgres-list-views.md +++ b/docs/en/resources/tools/postgres/postgres-list-views.md @@ -29,16 +29,16 @@ parameters: ## Example ```yaml -tools: - list_views: - kind: postgres-list-views - source: cloudsql-pg-source +kind: tools +name: list_views +type: postgres-list-views +source: cloudsql-pg-source ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------| -| kind | string | true | Must be "postgres-list-views". | +| type | string | true | Must be "postgres-list-views". | | source | string | true | Name of the source the SQL should execute on. | | description | string | false | Description of the tool that is passed to the agent. | diff --git a/docs/en/resources/tools/postgres/postgres-long-running-transactions.md b/docs/en/resources/tools/postgres/postgres-long-running-transactions.md index 0380ca7c21..fc264bb6ee 100644 --- a/docs/en/resources/tools/postgres/postgres-long-running-transactions.md +++ b/docs/en/resources/tools/postgres/postgres-long-running-transactions.md @@ -60,11 +60,11 @@ LIMIT ## Example ```yaml -tools: - long_running_transactions: - kind: postgres-long-running-transactions - source: postgres-source - description: "Identifies transactions open longer than a threshold and returns details including query text and durations." +kind: tools +name: long_running_transactions +type: postgres-long-running-transactions +source: postgres-source +description: "Identifies transactions open longer than a threshold and returns details including query text and durations." 
``` Example response element: diff --git a/docs/en/resources/tools/postgres/postgres-replication-stats.md b/docs/en/resources/tools/postgres/postgres-replication-stats.md index c9dfdc81cb..b257753955 100644 --- a/docs/en/resources/tools/postgres/postgres-replication-stats.md +++ b/docs/en/resources/tools/postgres/postgres-replication-stats.md @@ -23,11 +23,11 @@ This tool takes no parameters. It returns a JSON array; each element represents ## Example ```yaml -tools: - replication_stats: - kind: postgres-replication-stats - source: postgres-source - description: "Lists replication connections and readable WAL lag metrics." +kind: tools +name: replication_stats +type: postgres-replication-stats +source: postgres-source +description: "Lists replication connections and readable WAL lag metrics." ``` Example response element: diff --git a/docs/en/resources/tools/postgres/postgres-sql.md b/docs/en/resources/tools/postgres/postgres-sql.md index 190f9478e3..067386b957 100644 --- a/docs/en/resources/tools/postgres/postgres-sql.md +++ b/docs/en/resources/tools/postgres/postgres-sql.md @@ -34,41 +34,41 @@ of the prepared statement. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT * FROM flights - WHERE airline = $1 - AND flight_number = $2 - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". 
- If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: postgres-sql +source: my-pg-instance +statement: | + SELECT * FROM flights + WHERE airline = $1 + AND flight_number = $2 + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -80,29 +80,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: postgres-sql - source: my-pg-instance - statement: | - SELECT * FROM {{.tableName}} - description: | - Use this tool to list all information from a specific table. 
- Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: postgres-sql +source: my-pg-instance +statement: | + SELECT * FROM {{.tableName}} +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "postgres-sql". | +| type | string | true | Must be "postgres-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/redis/redis.md b/docs/en/resources/tools/redis/redis.md index 53dc344d6a..fade388416 100644 --- a/docs/en/resources/tools/redis/redis.md +++ b/docs/en/resources/tools/redis/redis.md @@ -42,21 +42,21 @@ to be executed after argument expansion will be `[SADD, userNames, Alice, Sid, B ## Example ```yaml -tools: - user_data_tool: - kind: redis - source: my-redis-instance - description: | - Use this tool to interact with user data stored in Redis. - It can set, retrieve, and delete user-specific information. - commands: - - [SADD, userNames, $userNames] # Array will be flattened into multiple arguments. - - [GET, $userId] - parameters: - - name: userId - type: string - description: The unique identifier for the user. - - name: userNames - type: array - description: The user names to be set. 
+kind: tools +name: user_data_tool +type: redis +source: my-redis-instance +description: | + Use this tool to interact with user data stored in Redis. + It can set, retrieve, and delete user-specific information. +commands: + - [SADD, userNames, $userNames] # Array will be flattened into multiple arguments. + - [GET, $userId] +parameters: + - name: userId + type: string + description: The unique identifier for the user. + - name: userNames + type: array + description: The user names to be set. ``` diff --git a/docs/en/resources/tools/serverless-spark/serverless-spark-cancel-batch.md b/docs/en/resources/tools/serverless-spark/serverless-spark-cancel-batch.md index 7af6f05ed4..61fa321170 100644 --- a/docs/en/resources/tools/serverless-spark/serverless-spark-cancel-batch.md +++ b/docs/en/resources/tools/serverless-spark/serverless-spark-cancel-batch.md @@ -30,11 +30,11 @@ The tool inherits the `project` and `location` from the source configuration. ## Example ```yaml -tools: - cancel_spark_batch: - kind: serverless-spark-cancel-batch - source: my-serverless-spark-source - description: Use this tool to cancel a running serverless spark batch operation. +kind: tools +name: cancel_spark_batch +type: serverless-spark-cancel-batch +source: my-serverless-spark-source +description: Use this tool to cancel a running serverless spark batch operation. ``` ## Response Format @@ -47,7 +47,7 @@ tools: | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | -------------------------------------------------- | -| kind | string | true | Must be "serverless-spark-cancel-batch". | +| type | string | true | Must be "serverless-spark-cancel-batch". | | source | string | true | Name of the source the tool should use. | | description | string | true | Description of the tool that is passed to the LLM. 
| | authRequired | string[] | false | List of auth services required to invoke this tool | diff --git a/docs/en/resources/tools/serverless-spark/serverless-spark-create-pyspark-batch.md b/docs/en/resources/tools/serverless-spark/serverless-spark-create-pyspark-batch.md index b94d386b2d..e0d2a5c097 100644 --- a/docs/en/resources/tools/serverless-spark/serverless-spark-create-pyspark-batch.md +++ b/docs/en/resources/tools/serverless-spark/serverless-spark-create-pyspark-batch.md @@ -43,16 +43,16 @@ prebuilt config. ### Example `tools.yaml` ```yaml -tools: - - name: "serverless-spark-create-pyspark-batch" - kind: "serverless-spark-create-pyspark-batch" - source: "my-serverless-spark-source" - runtimeConfig: - properties: - spark.driver.memory: "1024m" - environmentConfig: - executionConfig: - networkUri: "my-network" +kind: tools +name: serverless-spark-create-pyspark-batch +type: serverless-spark-create-pyspark-batch +source: "my-serverless-spark-source" +runtimeConfig: + properties: + spark.driver.memory: "1024m" +environmentConfig: + executionConfig: + networkUri: "my-network" ``` ## Response Format @@ -89,7 +89,7 @@ detailed information. | **field** | **type** | **required** | **description** | | ----------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "serverless-spark-create-pyspark-batch". | +| type | string | true | Must be "serverless-spark-create-pyspark-batch". | | source | string | true | Name of the source the tool should use. | | description | string | false | Description of the tool that is passed to the LLM. | | runtimeConfig | map | false | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. 
| diff --git a/docs/en/resources/tools/serverless-spark/serverless-spark-create-spark-batch.md b/docs/en/resources/tools/serverless-spark/serverless-spark-create-spark-batch.md index 8264be00b0..46a0b87292 100644 --- a/docs/en/resources/tools/serverless-spark/serverless-spark-create-spark-batch.md +++ b/docs/en/resources/tools/serverless-spark/serverless-spark-create-spark-batch.md @@ -48,16 +48,16 @@ prebuilt config. ### Example `tools.yaml` ```yaml -tools: - - name: "serverless-spark-create-spark-batch" - kind: "serverless-spark-create-spark-batch" - source: "my-serverless-spark-source" - runtimeConfig: - properties: - spark.driver.memory: "1024m" - environmentConfig: - executionConfig: - networkUri: "my-network" +kind: tools +name: "serverless-spark-create-spark-batch" +type: "serverless-spark-create-spark-batch" +source: "my-serverless-spark-source" +runtimeConfig: + properties: + spark.driver.memory: "1024m" +environmentConfig: + executionConfig: + networkUri: "my-network" ``` ## Response Format @@ -94,7 +94,7 @@ detailed information. | **field** | **type** | **required** | **description** | | ----------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -| kind | string | true | Must be "serverless-spark-create-spark-batch". | +| type | string | true | Must be "serverless-spark-create-spark-batch". | | source | string | true | Name of the source the tool should use. | | description | string | false | Description of the tool that is passed to the LLM. | | runtimeConfig | map | false | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. 
| diff --git a/docs/en/resources/tools/serverless-spark/serverless-spark-get-batch.md b/docs/en/resources/tools/serverless-spark/serverless-spark-get-batch.md index 754aab9fd9..a43d6fdb04 100644 --- a/docs/en/resources/tools/serverless-spark/serverless-spark-get-batch.md +++ b/docs/en/resources/tools/serverless-spark/serverless-spark-get-batch.md @@ -25,11 +25,11 @@ The tool gets the `project` and `location` from the source configuration. ## Example ```yaml -tools: - get_my_batch: - kind: serverless-spark-get-batch - source: my-serverless-spark-source - description: Use this tool to get a serverless spark batch. +kind: tools +name: get_my_batch +type: serverless-spark-get-batch +source: my-serverless-spark-source +description: Use this tool to get a serverless spark batch. ``` ## Response Format @@ -85,7 +85,7 @@ detailed information. | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | -------------------------------------------------- | -| kind | string | true | Must be "serverless-spark-get-batch". | +| type | string | true | Must be "serverless-spark-get-batch". | | source | string | true | Name of the source the tool should use. | | description | string | true | Description of the tool that is passed to the LLM. | | authRequired | string[] | false | List of auth services required to invoke this tool | diff --git a/docs/en/resources/tools/serverless-spark/serverless-spark-list-batches.md b/docs/en/resources/tools/serverless-spark/serverless-spark-list-batches.md index 9f0e5f0e7c..7a74006084 100644 --- a/docs/en/resources/tools/serverless-spark/serverless-spark-list-batches.md +++ b/docs/en/resources/tools/serverless-spark/serverless-spark-list-batches.md @@ -33,11 +33,11 @@ The tool gets the `project` and `location` from the source configuration. 
## Example ```yaml -tools: - list_spark_batches: - kind: serverless-spark-list-batches - source: my-serverless-spark-source - description: Use this tool to list and filter serverless spark batches. +kind: tools +name: list_spark_batches +type: serverless-spark-list-batches +source: my-serverless-spark-source +description: Use this tool to list and filter serverless spark batches. ``` ## Response Format @@ -72,7 +72,7 @@ tools: | **field** | **type** | **required** | **description** | | ------------ | :------: | :----------: | -------------------------------------------------- | -| kind | string | true | Must be "serverless-spark-list-batches". | +| type | string | true | Must be "serverless-spark-list-batches". | | source | string | true | Name of the source the tool should use. | | description | string | true | Description of the tool that is passed to the LLM. | | authRequired | string[] | false | List of auth services required to invoke this tool | diff --git a/docs/en/resources/tools/singlestore/singlestore-execute-sql.md b/docs/en/resources/tools/singlestore/singlestore-execute-sql.md index c4c007eaf1..fc2e9fbf55 100644 --- a/docs/en/resources/tools/singlestore/singlestore-execute-sql.md +++ b/docs/en/resources/tools/singlestore/singlestore-execute-sql.md @@ -25,17 +25,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: singlestore-execute-sql - source: my-s2-instance - description: Use this tool to execute sql statement +kind: tools +name: execute_sql_tool +type: singlestore-execute-sql +source: my-s2-instance +description: Use this tool to execute sql statement ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "singlestore-execute-sql". | +| type | string | true | Must be "singlestore-execute-sql". 
| | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/singlestore/singlestore-sql.md b/docs/en/resources/tools/singlestore/singlestore-sql.md index c745e2d9f3..885fbc959f 100644 --- a/docs/en/resources/tools/singlestore/singlestore-sql.md +++ b/docs/en/resources/tools/singlestore/singlestore-sql.md @@ -27,41 +27,41 @@ form of placeholders `?`. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: singlestore-sql - source: my-s2-instance - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: singlestore-sql +source: my-s2-instance +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. 
+ Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -73,29 +73,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: singlestore-sql - source: my-s2-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: singlestore-sql +source: my-s2-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "singlestore-sql". 
| +| type | string | true | Must be "singlestore-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/snowflake/snowflake-execute-sql.md b/docs/en/resources/tools/snowflake/snowflake-execute-sql.md index 1d6af931bd..b5af0fe63f 100644 --- a/docs/en/resources/tools/snowflake/snowflake-execute-sql.md +++ b/docs/en/resources/tools/snowflake/snowflake-execute-sql.md @@ -23,18 +23,18 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: snowflake-execute-sql - source: my-snowflake-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: snowflake-execute-sql +source: my-snowflake-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |--------------|:-------------:|:------------:|-----------------------------------------------------------| -| kind | string | true | Must be "snowflake-execute-sql". | +| type | string | true | Must be "snowflake-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | authRequired | array[string] | false | List of auth services that are required to use this tool. | diff --git a/docs/en/resources/tools/snowflake/snowflake-sql.md b/docs/en/resources/tools/snowflake/snowflake-sql.md index d55241e32f..b66041cac5 100644 --- a/docs/en/resources/tools/snowflake/snowflake-sql.md +++ b/docs/en/resources/tools/snowflake/snowflake-sql.md @@ -26,41 +26,41 @@ first parameter specified, `:2` will be the second parameter, and so on. 
## Example ```yaml -tools: - search_flights_by_number: - kind: snowflake-sql - source: my-snowflake-instance - statement: | - SELECT * FROM flights - WHERE airline = :1 - AND flight_number = :2 - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: snowflake-sql +source: my-snowflake-instance +statement: | + SELECT * FROM flights + WHERE airline = :1 + AND flight_number = :2 + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -72,29 +72,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: snowflake - source: my-snowflake-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: snowflake +source: my-snowflake-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "snowflake-sql". | +| type | string | true | Must be "snowflake-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. 
| diff --git a/docs/en/resources/tools/spanner/spanner-execute-sql.md b/docs/en/resources/tools/spanner/spanner-execute-sql.md index b04e90e34e..b9e5c6c255 100644 --- a/docs/en/resources/tools/spanner/spanner-execute-sql.md +++ b/docs/en/resources/tools/spanner/spanner-execute-sql.md @@ -25,18 +25,18 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: spanner-execute-sql - source: my-spanner-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: spanner-execute-sql +source: my-spanner-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|------------------------------------------------------------------------------------------| -| kind | string | true | Must be "spanner-execute-sql". | +| type | string | true | Must be "spanner-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | readOnly | bool | false | When set to `true`, the `statement` is run as a read-only transaction. Default: `false`. | diff --git a/docs/en/resources/tools/spanner/spanner-list-graphs.md b/docs/en/resources/tools/spanner/spanner-list-graphs.md index e846fdfe77..7d8ae63fbc 100644 --- a/docs/en/resources/tools/spanner/spanner-list-graphs.md +++ b/docs/en/resources/tools/spanner/spanner-list-graphs.md @@ -35,35 +35,35 @@ source dialect, as Spanner Graph isn't available in the PostgreSQL dialect. 
### Basic Usage - List All Graphs ```yaml -sources: - my-spanner-db: - kind: spanner - project: ${SPANNER_PROJECT} - instance: ${SPANNER_INSTANCE} - database: ${SPANNER_DATABASE} - dialect: googlesql # wont work for postgresql - -tools: - list_all_graphs: - kind: spanner-list-graphs - source: my-spanner-db - description: Lists all graphs with their complete schema information +kind: sources +name: my-spanner-db +type: spanner +project: ${SPANNER_PROJECT} +instance: ${SPANNER_INSTANCE} +database: ${SPANNER_DATABASE} +dialect: googlesql # wont work for postgresql +--- +kind: tools +name: list_all_graphs +type: spanner-list-graphs +source: my-spanner-db +description: Lists all graphs with their complete schema information ``` ### List Specific Graphs ```yaml -tools: - list_specific_graphs: - kind: spanner-list-graphs - source: my-spanner-db - description: | - Lists schema information for specific graphs. - Example usage: - { - "graph_names": "FinGraph,SocialGraph", - "output_format": "detailed" - } +kind: tools +name: list_specific_graphs +type: spanner-list-graphs +source: my-spanner-db +description: | + Lists schema information for specific graphs. + Example usage: + { + "graph_names": "FinGraph,SocialGraph", + "output_format": "detailed" + } ``` ## Parameters @@ -235,36 +235,36 @@ comprehensive schema information: ## Example with Agent Integration ```yaml -sources: - spanner-db: - kind: spanner - project: my-project - instance: my-instance - database: my-database - dialect: googlesql - -tools: - schema_inspector: - kind: spanner-list-graphs - source: spanner-db - description: | - Use this tool to inspect database schema information. - You can: - - List all graphs by leaving graph_names empty - - Get specific graph schemas by providing comma-separated graph names - - Choose between simple (names only) or detailed (full schema) output - - Examples: - 1. List all graphs with details: {"output_format": "detailed"} - 2. 
Get specific graphs: {"graph_names": "FinGraph,SocialGraph", "output_format": "detailed"} - 3. Just get graph names: {"output_format": "simple"} +kind: sources +name: spanner-db +type: spanner +project: my-project +instance: my-instance +database: my-database +dialect: googlesql +--- +kind: tools +name: schema_inspector +type: spanner-list-graphs +source: spanner-db +description: | + Use this tool to inspect database schema information. + You can: + - List all graphs by leaving graph_names empty + - Get specific graph schemas by providing comma-separated graph names + - Choose between simple (names only) or detailed (full schema) output + + Examples: + 1. List all graphs with details: {"output_format": "detailed"} + 2. Get specific graphs: {"graph_names": "FinGraph,SocialGraph", "output_format": "detailed"} + 3. Just get graph names: {"output_format": "simple"} ``` ## Reference | **field** | **type** | **required** | **description** | |--------------|:--------:|:------------:|-----------------------------------------------------------------| -| kind | string | true | Must be "spanner-list-graphs" | +| type | string | true | Must be "spanner-list-graphs" | | source | string | true | Name of the Spanner source to query (dialect must be GoogleSQL) | | description | string | false | Description of the tool that is passed to the LLM | | authRequired | string[] | false | List of auth services required to invoke this tool | diff --git a/docs/en/resources/tools/spanner/spanner-list-tables.md b/docs/en/resources/tools/spanner/spanner-list-tables.md index 3a7f6ea48a..d1b196fa9c 100644 --- a/docs/en/resources/tools/spanner/spanner-list-tables.md +++ b/docs/en/resources/tools/spanner/spanner-list-tables.md @@ -36,35 +36,35 @@ syntax. 
### Basic Usage - List All Tables ```yaml -sources: - my-spanner-db: - kind: spanner - project: ${SPANNER_PROJECT} - instance: ${SPANNER_INSTANCE} - database: ${SPANNER_DATABASE} - dialect: googlesql # or postgresql - -tools: - list_all_tables: - kind: spanner-list-tables - source: my-spanner-db - description: Lists all tables with their complete schema information +kind: sources +name: my-spanner-db +type: spanner +project: ${SPANNER_PROJECT} +instance: ${SPANNER_INSTANCE} +database: ${SPANNER_DATABASE} +dialect: googlesql # or postgresql +--- +kind: tools +name: list_all_tables +type: spanner-list-tables +source: my-spanner-db +description: Lists all tables with their complete schema information ``` ### List Specific Tables ```yaml -tools: - list_specific_tables: - kind: spanner-list-tables - source: my-spanner-db - description: | - Lists schema information for specific tables. - Example usage: - { - "table_names": "users,orders,products", - "output_format": "detailed" - } +kind: tools +name: list_specific_tables +type: spanner-list-tables +source: my-spanner-db +description: | + Lists schema information for specific tables. + Example usage: + { + "table_names": "users,orders,products", + "output_format": "detailed" + } ``` ## Parameters @@ -177,36 +177,36 @@ comprehensive schema information: ## Example with Agent Integration ```yaml -sources: - spanner-db: - kind: spanner - project: my-project - instance: my-instance - database: my-database - dialect: googlesql - -tools: - schema_inspector: - kind: spanner-list-tables - source: spanner-db - description: | - Use this tool to inspect database schema information. - You can: - - List all tables by leaving table_names empty - - Get specific table schemas by providing comma-separated table names - - Choose between simple (names only) or detailed (full schema) output - - Examples: - 1. List all tables with details: {"output_format": "detailed"} - 2. 
Get specific tables: {"table_names": "users,orders", "output_format": "detailed"} - 3. Just get table names: {"output_format": "simple"} +kind: sources +name: spanner-db +type: spanner +project: my-project +instance: my-instance +database: my-database +dialect: googlesql +--- +kind: tools +name: schema_inspector +type: spanner-list-tables +source: spanner-db +description: | + Use this tool to inspect database schema information. + You can: + - List all tables by leaving table_names empty + - Get specific table schemas by providing comma-separated table names + - Choose between simple (names only) or detailed (full schema) output + + Examples: + 1. List all tables with details: {"output_format": "detailed"} + 2. Get specific tables: {"table_names": "users,orders", "output_format": "detailed"} + 3. Just get table names: {"output_format": "simple"} ``` ## Reference | **field** | **type** | **required** | **description** | |--------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "spanner-list-tables" | +| type | string | true | Must be "spanner-list-tables" | | source | string | true | Name of the Spanner source to query | | description | string | false | Description of the tool that is passed to the LLM | | authRequired | string[] | false | List of auth services required to invoke this tool | diff --git a/docs/en/resources/tools/spanner/spanner-sql.md b/docs/en/resources/tools/spanner/spanner-sql.md index edf21254b4..221ecc7288 100644 --- a/docs/en/resources/tools/spanner/spanner-sql.md +++ b/docs/en/resources/tools/spanner/spanner-sql.md @@ -50,80 +50,80 @@ the second parameter, and so on. 
{{< tabpane persist="header" >}} {{< tab header="GoogleSQL" lang="yaml" >}} -tools: - search_flights_by_number: - kind: spanner-sql - source: my-spanner-instance - statement: | - SELECT * FROM flights - WHERE airline = @airline - AND flight_number = @flight_number - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: spanner-sql +source: my-spanner-instance +statement: | + SELECT * FROM flights + WHERE airline = @airline + AND flight_number = @flight_number + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". 
+ If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number {{< /tab >}} {{< tab header="PostgreSQL" lang="yaml" >}} -tools: - search_flights_by_number: - kind: spanner - source: my-spanner-instance - statement: | - SELECT * FROM flights - WHERE airline = $1 - AND flight_number = $2 - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: spanner +source: my-spanner-instance +statement: | + SELECT * FROM flights + WHERE airline = $1 + AND flight_number = $2 + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. 
+ A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number {{< /tab >}} {{< /tabpane >}} @@ -137,29 +137,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: spanner - source: my-spanner-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: spanner +source: my-spanner-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "spanner-sql". | +| type | string | true | Must be "spanner-sql". | | source | string | true | Name of the source the SQL should execute on. 
| | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/sqlite/sqlite-execute-sql.md b/docs/en/resources/tools/sqlite/sqlite-execute-sql.md index 0360154a44..7e5d293641 100644 --- a/docs/en/resources/tools/sqlite/sqlite-execute-sql.md +++ b/docs/en/resources/tools/sqlite/sqlite-execute-sql.md @@ -25,17 +25,17 @@ This tool is designed for direct execution of SQL statements. It takes a single ## Example ```yaml -tools: - execute_sql_tool: - kind: sqlite-execute-sql - source: my-sqlite-db - description: Use this tool to execute a SQL statement. +kind: tools +name: execute_sql_tool +type: sqlite-execute-sql +source: my-sqlite-db +description: Use this tool to execute a SQL statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "sqlite-execute-sql". | +| type | string | true | Must be "sqlite-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/sqlite/sqlite-sql.md b/docs/en/resources/tools/sqlite/sqlite-sql.md index ecc714dfb2..de5c9bc48b 100644 --- a/docs/en/resources/tools/sqlite/sqlite-sql.md +++ b/docs/en/resources/tools/sqlite/sqlite-sql.md @@ -30,19 +30,19 @@ DDL statements. > names, or other parts of the query. ```yaml -tools: - search-users: - kind: sqlite-sql - source: my-sqlite-db - description: Search users by name and age - parameters: - - name: name - type: string - description: The name to search for - - name: min_age - type: integer - description: Minimum age - statement: SELECT * FROM users WHERE name LIKE ? AND age >= ? 
+kind: tools +name: search-users +type: sqlite-sql +source: my-sqlite-db +description: Search users by name and age +parameters: + - name: name + type: string + description: The name to search for + - name: min_age + type: integer + description: Minimum age +statement: SELECT * FROM users WHERE name LIKE ? AND age >= ? ``` ### Example with Template Parameters @@ -54,29 +54,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: sqlite-sql - source: my-sqlite-db - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: sqlite-sql +source: my-sqlite-db +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "sqlite-sql". | +| type | string | true | Must be "sqlite-sql". | | source | string | true | Name of the source the SQLite source configuration. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | The SQL statement to execute. 
| diff --git a/docs/en/resources/tools/tidb/tidb-execute-sql.md b/docs/en/resources/tools/tidb/tidb-execute-sql.md index 8b1e68b8b2..fd36d7a845 100644 --- a/docs/en/resources/tools/tidb/tidb-execute-sql.md +++ b/docs/en/resources/tools/tidb/tidb-execute-sql.md @@ -25,17 +25,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: tidb-execute-sql - source: my-tidb-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: tidb-execute-sql +source: my-tidb-instance +description: Use this tool to execute sql statement. ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------:|:------------:|----------------------------------------------------| -| kind | string | true | Must be "tidb-execute-sql". | +| type | string | true | Must be "tidb-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/tidb/tidb-sql.md b/docs/en/resources/tools/tidb/tidb-sql.md index 3ec6caff45..8883d7ef42 100644 --- a/docs/en/resources/tools/tidb/tidb-sql.md +++ b/docs/en/resources/tools/tidb/tidb-sql.md @@ -29,41 +29,41 @@ and expects parameters in the SQL query to be in the form of placeholders `?`. > names, or other parts of the query. ```yaml -tools: - search_flights_by_number: - kind: tidb-sql - source: my-tidb-instance - statement: | - SELECT * FROM flights - WHERE airline = ? - AND flight_number = ? - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. 
- For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: tidb-sql +source: my-tidb-instance +statement: | + SELECT * FROM flights + WHERE airline = ? + AND flight_number = ? + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. + Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -75,29 +75,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: tidb-sql - source: my-tidb-instance - statement: | - SELECT * FROM {{.tableName}}; - description: | - Use this tool to list all information from a specific table. 
- Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: tidb-sql +source: my-tidb-instance +statement: | + SELECT * FROM {{.tableName}}; +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "tidb-sql". | +| type | string | true | Must be "tidb-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/trino/trino-execute-sql.md b/docs/en/resources/tools/trino/trino-execute-sql.md index 3d18cd6ee8..3d07133e03 100644 --- a/docs/en/resources/tools/trino/trino-execute-sql.md +++ b/docs/en/resources/tools/trino/trino-execute-sql.md @@ -25,17 +25,17 @@ statement against the `source`. ## Example ```yaml -tools: - execute_sql_tool: - kind: trino-execute-sql - source: my-trino-instance - description: Use this tool to execute sql statement. +kind: tools +name: execute_sql_tool +type: trino-execute-sql +source: my-trino-instance +description: Use this tool to execute sql statement. 
``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "trino-execute-sql". | +| type | string | true | Must be "trino-execute-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | diff --git a/docs/en/resources/tools/trino/trino-sql.md b/docs/en/resources/tools/trino/trino-sql.md index 4b0f8b21d5..1ee22b4239 100644 --- a/docs/en/resources/tools/trino/trino-sql.md +++ b/docs/en/resources/tools/trino/trino-sql.md @@ -28,41 +28,41 @@ The specified SQL statement is executed as a [prepared statement][trino-prepare] > names, or other parts of the query. ```yaml -tools: - search_orders_by_region: - kind: trino-sql - source: my-trino-instance - statement: | - SELECT * FROM hive.sales.orders - WHERE region = ? - AND order_date >= DATE(?) - LIMIT 10 - description: | - Use this tool to get information for orders in a specific region. - Takes a region code and date and returns info on the orders. - Do NOT use this tool with an order id. Do NOT guess a region code or date. - A region code is a code for a geographic region consisting of two-character - region designator and followed by optional subregion. - For example, if given US-WEST, the region is "US-WEST". - Another example for this is EU-CENTRAL, the region is "EU-CENTRAL". - If the tool returns more than one option choose the date closest to today. 
- Example: - {{ - "region": "US-WEST", - "order_date": "2024-01-01", - }} - Example: - {{ - "region": "EU-CENTRAL", - "order_date": "2024-01-15", - }} - parameters: - - name: region - type: string - description: Region unique identifier - - name: order_date - type: string - description: Order date in YYYY-MM-DD format +kind: tools +name: search_orders_by_region +type: trino-sql +source: my-trino-instance +statement: | + SELECT * FROM hive.sales.orders + WHERE region = ? + AND order_date >= DATE(?) + LIMIT 10 +description: | + Use this tool to get information for orders in a specific region. + Takes a region code and date and returns info on the orders. + Do NOT use this tool with an order id. Do NOT guess a region code or date. + A region code is a code for a geographic region consisting of two-character + region designator and followed by optional subregion. + For example, if given US-WEST, the region is "US-WEST". + Another example for this is EU-CENTRAL, the region is "EU-CENTRAL". + If the tool returns more than one option choose the date closest to today. + Example: + {{ + "region": "US-WEST", + "order_date": "2024-01-01", + }} + Example: + {{ + "region": "EU-CENTRAL", + "order_date": "2024-01-15", + }} +parameters: + - name: region + type: string + description: Region unique identifier + - name: order_date + type: string + description: Order date in YYYY-MM-DD format ``` ### Example with Template Parameters @@ -74,29 +74,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: trino-sql - source: my-trino-instance - statement: | - SELECT * FROM {{.tableName}} - description: | - Use this tool to list all information from a specific table. 
- Example: - {{ - "tableName": "hive.sales.orders", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: trino-sql +source: my-trino-instance +statement: | + SELECT * FROM {{.tableName}} +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "hive.sales.orders", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "trino-sql". | +| type | string | true | Must be "trino-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. | diff --git a/docs/en/resources/tools/utility/wait.md b/docs/en/resources/tools/utility/wait.md index 1fde916ce5..c58d69a24b 100644 --- a/docs/en/resources/tools/utility/wait.md +++ b/docs/en/resources/tools/utility/wait.md @@ -24,17 +24,17 @@ and shouldn't be used for production agents. ## Example ```yaml -tools: - wait_for_tool: - kind: wait - description: Use this tool to pause execution for a specified duration. - timeout: 30s +kind: tools +name: wait_for_tool +type: wait +description: Use this tool to pause execution for a specified duration. +timeout: 30s ``` ## Reference | **field** | **type** | **required** | **description** | |-------------|:--------------:|:------------:|-------------------------------------------------------| -| kind | string | true | Must be "wait". | +| type | string | true | Must be "wait". 
| | description | string | true | Description of the tool that is passed to the LLM. | | timeout | string | true | The default duration the tool can wait for. | diff --git a/docs/en/resources/tools/valkey/valkey.md b/docs/en/resources/tools/valkey/valkey.md index fab9e5e3b1..3b801fcae9 100644 --- a/docs/en/resources/tools/valkey/valkey.md +++ b/docs/en/resources/tools/valkey/valkey.md @@ -38,21 +38,21 @@ to be executed after argument expansion will be `[SADD, userNames, Alice, Sid, B ## Example ```yaml -tools: - user_data_tool: - kind: valkey - source: my-valkey-instance - description: | - Use this tool to interact with user data stored in Valkey. - It can set, retrieve, and delete user-specific information. - commands: - - [SADD, userNames, $userNames] # Array will be flattened into multiple arguments. - - [GET, $userId] - parameters: - - name: userId - type: string - description: The unique identifier for the user. - - name: userNames - type: array - description: The user names to be set. +kind: tools +name: user_data_tool +type: valkey +source: my-valkey-instance +description: | + Use this tool to interact with user data stored in Valkey. + It can set, retrieve, and delete user-specific information. +commands: + - [SADD, userNames, $userNames] # Array will be flattened into multiple arguments. + - [GET, $userId] +parameters: + - name: userId + type: string + description: The unique identifier for the user. + - name: userNames + type: array + description: The user names to be set. ``` diff --git a/docs/en/resources/tools/yuagbytedb/yugabytedb-sql.md b/docs/en/resources/tools/yuagbytedb/yugabytedb-sql.md index e96ab9e6be..82329c42c8 100644 --- a/docs/en/resources/tools/yuagbytedb/yugabytedb-sql.md +++ b/docs/en/resources/tools/yuagbytedb/yugabytedb-sql.md @@ -26,41 +26,41 @@ of the prepared statement. > names, or other parts of the query. 
```yaml -tools: - search_flights_by_number: - kind: yugabytedb-sql - source: my-yb-instance - statement: | - SELECT * FROM flights - WHERE airline = $1 - AND flight_number = $2 - LIMIT 10 - description: | - Use this tool to get information for a specific flight. - Takes an airline code and flight number and returns info on the flight. - Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. - A airline code is a code for an airline service consisting of two-character - airline designator and followed by flight number, which is 1 to 4 digit number. - For example, if given CY 0123, the airline is "CY", and flight_number is "123". - Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". - If the tool returns more than one option choose the date closes to today. - Example: - {{ - "airline": "CY", - "flight_number": "888", - }} - Example: - {{ - "airline": "DL", - "flight_number": "1234", - }} - parameters: - - name: airline - type: string - description: Airline unique 2 letter identifier - - name: flight_number - type: string - description: 1 to 4 digit number +kind: tools +name: search_flights_by_number +type: yugabytedb-sql +source: my-yb-instance +statement: | + SELECT * FROM flights + WHERE airline = $1 + AND flight_number = $2 + LIMIT 10 +description: | + Use this tool to get information for a specific flight. + Takes an airline code and flight number and returns info on the flight. + Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. + A airline code is a code for an airline service consisting of two-character + airline designator and followed by flight number, which is 1 to 4 digit number. + For example, if given CY 0123, the airline is "CY", and flight_number is "123". + Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". + If the tool returns more than one option choose the date closes to today. 
+ Example: + {{ + "airline": "CY", + "flight_number": "888", + }} + Example: + {{ + "airline": "DL", + "flight_number": "1234", + }} +parameters: + - name: airline + type: string + description: Airline unique 2 letter identifier + - name: flight_number + type: string + description: 1 to 4 digit number ``` ### Example with Template Parameters @@ -72,29 +72,29 @@ tools: > [templateParameters](..#template-parameters). ```yaml -tools: - list_table: - kind: yugabytedb-sql - source: my-yb-instance - statement: | - SELECT * FROM {{.tableName}} - description: | - Use this tool to list all information from a specific table. - Example: - {{ - "tableName": "flights", - }} - templateParameters: - - name: tableName - type: string - description: Table to select from +kind: tools +name: list_table +type: yugabytedb-sql +source: my-yb-instance +statement: | + SELECT * FROM {{.tableName}} +description: | + Use this tool to list all information from a specific table. + Example: + {{ + "tableName": "flights", + }} +templateParameters: + - name: tableName + type: string + description: Table to select from ``` ## Reference | **field** | **type** | **required** | **description** | |--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------| -| kind | string | true | Must be "yugabytedb-sql". | +| type | string | true | Must be "yugabytedb-sql". | | source | string | true | Name of the source the SQL should execute on. | | description | string | true | Description of the tool that is passed to the LLM. | | statement | string | true | SQL statement to execute on. 
| diff --git a/docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb b/docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb index f931a2119b..aba3c2a7d2 100644 --- a/docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb +++ b/docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb @@ -771,7 +771,7 @@ }, "outputs": [], "source": [ - "version = \"0.26.0\" # x-release-please-version\n", + "version = \"0.27.0\" # x-release-please-version\n", "! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n", "\n", "# Make the binary executable\n", @@ -801,27 +801,30 @@ "# Create a tools file\n", "tools_file_name = \"tools.yml\"\n", "file_content = f\"\"\"\n", - "sources:\n", - " my-alloydb-pg-source:\n", - " kind: alloydb-postgres\n", - " project: {project_id}\n", - " region: {region}\n", - " cluster: {cluster_name}\n", - " instance: {instance_name}\n", - " database: {database_name}\n", - " user: postgres\n", - " password: {password}\n", - "tools:\n", - " ask_questions:\n", - " kind: alloydb-ai-nl\n", - " source: my-alloydb-pg-source\n", - " description: 'Ask any natural language questions about the tables'\n", - " nlConfig: 'nla_demo_cfg'\n", - " basic_sql:\n", - " kind: postgres-sql\n", - " source: my-alloydb-pg-source\n", - " description: 'Check if db is connected'\n", - " statement: SELECT * from nla_demo.products;\n", + "kind: sources\n", + "name: my-alloydb-pg-source\n", + "type: alloydb-postgres\n", + "project: {project_id}\n", + "region: {region}\n", + "cluster: {cluster_name}\n", + "instance: {instance_name}\n", + "database: {database_name}\n", + "user: postgres\n", + "password: {password}\n", + "---\n", + "kind: tools\n", + "name: ask_questions\n", + "type: alloydb-ai-nl\n", + "source: my-alloydb-pg-source\n", + "description: 'Ask any natural language questions about the tables'\n", + "nlConfig: 'nla_demo_cfg'\n", + "---\n", + "kind: tools\n", + "name: basic_sql\n", + "type: postgres-sql\n", + "source: my-alloydb-pg-source\n", + 
"description: 'Check if db is connected'\n", + "statement: SELECT * from nla_demo.products;\n", "\"\"\"" ] }, diff --git a/docs/en/samples/alloydb/mcp_quickstart.md b/docs/en/samples/alloydb/mcp_quickstart.md index 82bd5198e7..43ae7fd311 100644 --- a/docs/en/samples/alloydb/mcp_quickstart.md +++ b/docs/en/samples/alloydb/mcp_quickstart.md @@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary. ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - export VERSION="0.26.0" + export VERSION="0.27.0" curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox ``` @@ -143,16 +143,16 @@ First, define the data source for your tools. This tells Toolbox how to connect to your AlloyDB instance. ```yaml -sources: - alloydb-pg-source: - kind: alloydb-postgres - project: YOUR_PROJECT_ID - region: YOUR_REGION - cluster: YOUR_CLUSTER - instance: YOUR_INSTANCE - database: YOUR_DATABASE - user: YOUR_USER - password: YOUR_PASSWORD +kind: sources +name: alloydb-pg-source +type: alloydb-postgres +project: YOUR_PROJECT_ID +region: YOUR_REGION +cluster: YOUR_CLUSTER +instance: YOUR_INSTANCE +database: YOUR_DATABASE +user: YOUR_USER +password: YOUR_PASSWORD ``` Next, define the tools the agent can use. We will categorize them into three @@ -165,76 +165,77 @@ structured queries like managing a shopping cart. Add the following to your `tools.yaml` file: ```yaml -tools: - - access-cart-information: - kind: postgres-sql - source: alloydb-pg-source - description: >- - List items in customer cart. - Use this tool to list items in a customer cart. This tool requires the cart ID. - parameters: - - name: cart_id - type: integer - description: The id of the cart. 
- statement: | - SELECT - p.name AS product_name, - ci.quantity, - ci.price AS item_price, - (ci.quantity * ci.price) AS total_item_price, - c.created_at AS cart_created_at, - ci.product_id AS product_id - FROM - cart_items ci JOIN cart c ON ci.cart_id = c.cart_id - JOIN products p ON ci.product_id = p.product_id - WHERE - c.cart_id = $1; - - add-to-cart: - kind: postgres-sql - source: alloydb-pg-source - description: >- - Add items to customer cart using the product ID and product prices from the product list. - Use this tool to add items to a customer cart. - This tool requires the cart ID, product ID, quantity, and price. - parameters: - - name: cart_id - type: integer - description: The id of the cart. - - name: product_id - type: integer - description: The id of the product. - - name: quantity - type: integer - description: The quantity of items to add. - - name: price - type: float - description: The price of items to add. - statement: | - INSERT INTO - cart_items (cart_id, product_id, quantity, price) - VALUES($1,$2,$3,$4); - - delete-from-cart: - kind: postgres-sql - source: alloydb-pg-source - description: >- - Remove products from customer cart. - Use this tool to remove products from a customer cart. - This tool requires the cart ID and product ID. - parameters: - - name: cart_id - type: integer - description: The id of the cart. - - name: product_id - type: integer - description: The id of the product. - statement: | - DELETE FROM - cart_items - WHERE - cart_id = $1 AND product_id = $2; +kind: tools +name: access-cart-information +type: postgres-sql +source: alloydb-pg-source +description: >- + List items in customer cart. + Use this tool to list items in a customer cart. This tool requires the cart ID. +parameters: + - name: cart_id + type: integer + description: The id of the cart. 
+statement: | + SELECT + p.name AS product_name, + ci.quantity, + ci.price AS item_price, + (ci.quantity * ci.price) AS total_item_price, + c.created_at AS cart_created_at, + ci.product_id AS product_id + FROM + cart_items ci JOIN cart c ON ci.cart_id = c.cart_id + JOIN products p ON ci.product_id = p.product_id + WHERE + c.cart_id = $1; +--- +kind: tools +name: add-to-cart +type: postgres-sql +source: alloydb-pg-source +description: >- + Add items to customer cart using the product ID and product prices from the product list. + Use this tool to add items to a customer cart. + This tool requires the cart ID, product ID, quantity, and price. +parameters: + - name: cart_id + type: integer + description: The id of the cart. + - name: product_id + type: integer + description: The id of the product. + - name: quantity + type: integer + description: The quantity of items to add. + - name: price + type: float + description: The price of items to add. +statement: | + INSERT INTO + cart_items (cart_id, product_id, quantity, price) + VALUES($1,$2,$3,$4); +--- +kind: tools +name: delete-from-cart +type: postgres-sql +source: alloydb-pg-source +description: >- + Remove products from customer cart. + Use this tool to remove products from a customer cart. + This tool requires the cart ID and product ID. +parameters: + - name: cart_id + type: integer + description: The id of the cart. + - name: product_id + type: integer + description: The id of the product. +statement: | + DELETE FROM + cart_items + WHERE + cart_id = $1 AND product_id = $2; ``` ### 2. Semantic Search Tools @@ -244,27 +245,28 @@ meaning of a user's query, rather than just keywords. Append the following tools to the `tools` section in your `tools.yaml`: ```yaml - search-product-recommendations: - kind: postgres-sql - source: alloydb-pg-source - description: >- - Search for products based on user needs. - Use this tool to search for products. This tool requires the user's needs. 
- parameters: - - name: query - type: string - description: The product characteristics - statement: | - SELECT - product_id, - name, - description, - ROUND(CAST(price AS numeric), 2) as price - FROM - products - ORDER BY - embedding('gemini-embedding-001', $1)::vector <=> embedding - LIMIT 5; +kind: tools +name: search-product-recommendations +type: postgres-sql +source: alloydb-pg-source +description: >- + Search for products based on user needs. + Use this tool to search for products. This tool requires the user's needs. +parameters: + - name: query + type: string + description: The product characteristics +statement: | + SELECT + product_id, + name, + description, + ROUND(CAST(price AS numeric), 2) as price + FROM + products + ORDER BY + embedding('gemini-embedding-001', $1)::vector <=> embedding + LIMIT 5; ``` ### 3. Natural Language to SQL (NL2SQL) Tools @@ -286,27 +288,29 @@ to the `tools` section in your `tools.yaml`: section: ```yaml - ask-questions-about-products: - kind: alloydb-ai-nl - source: alloydb-pg-source - nlConfig: flower_shop - description: >- - Ask questions related to products or brands. - Use this tool to ask questions about products or brands. - Always SELECT the IDs of objects when generating queries. +kind: tools +name: ask-questions-about-products +type: alloydb-ai-nl +source: alloydb-pg-source +nlConfig: flower_shop +description: >- + Ask questions related to products or brands. + Use this tool to ask questions about products or brands. + Always SELECT the IDs of objects when generating queries. ``` Finally, group the tools into a `toolset` to make them available to the model. 
Add the following to the end of your `tools.yaml` file: ```yaml -toolsets: - flower_shop: - - access-cart-information - - search-product-recommendations - - ask-questions-about-products - - add-to-cart - - delete-from-cart +kind: toolsets +name: flower_shop +tools: + - access-cart-information + - search-product-recommendations + - ask-questions-about-products + - add-to-cart + - delete-from-cart ``` For more info on tools, check out the diff --git a/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb b/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb index 9c13b6e86e..6dbdc66e57 100644 --- a/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb +++ b/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb @@ -220,7 +220,7 @@ }, "outputs": [], "source": [ - "version = \"0.26.0\" # x-release-please-version\n", + "version = \"0.27.0\" # x-release-please-version\n", "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n", "\n", "# Make the binary executable\n", @@ -273,72 +273,83 @@ "# You can also upload a tools file and use that to run toolbox.\n", "tools_file_name = \"tools.yml\"\n", "file_content = f\"\"\"\n", - "sources:\n", - " my-bigquery-source:\n", - " kind: bigquery\n", - " project: {BIGQUERY_PROJECT}\n", + "kind: sources\n", + "name: my-bigquery-source\n", + "type: bigquery\n", + "project: {BIGQUERY_PROJECT}\n", + "---\n", + "kind: tools\n", + "name: search-hotels-by-name\n", + "type: bigquery-sql\n", + "source: my-bigquery-source\n", + "description: Search for hotels based on name.\n", + "parameters:\n", + " - name: name\n", + " type: string\n", + " description: The name of the hotel.\n", + "statement: SELECT * FROM `{DATASET}.{TABLE_ID}` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%'));\n", + "---\n", + "kind: tools\n", + "name: search-hotels-by-location\n", + "type: bigquery-sql\n", + "source: my-bigquery-source\n", + "description: Search for hotels based on location.\n", + "parameters:\n", + " - 
name: location\n", + " type: string\n", + " description: The location of the hotel.\n", + "statement: SELECT * FROM `{DATASET}.{TABLE_ID}` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%'));\n", + "---\n", + "kind: tools\n", + "name: book-hotel\n", + "type: bigquery-sql\n", + "source: my-bigquery-source\n", + "description: >-\n", + " Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n", + "parameters:\n", + " - name: hotel_id\n", + " type: integer\n", + " description: The ID of the hotel to book.\n", + "statement: UPDATE `{DATASET}.{TABLE_ID}` SET booked = TRUE WHERE id = @hotel_id;\n", + "---\n", + "kind: tools\n", + "name: update-hotel\n", + "type: bigquery-sql\n", + "source: my-bigquery-source\n", + "description: >-\n", + " Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not.\n", + "parameters:\n", + " - name: checkin_date\n", + " type: string\n", + " description: The new check-in date of the hotel.\n", + " - name: checkout_date\n", + " type: string\n", + " description: The new check-out date of the hotel.\n", + " - name: hotel_id\n", + " type: integer\n", + " description: The ID of the hotel to update.\n", + "statement: >-\n", + " UPDATE `{DATASET}.{TABLE_ID}` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id;\n", + "---\n", + "kind: tools\n", + "name: cancel-hotel\n", + "type: bigquery-sql\n", + "source: my-bigquery-source\n", + "description: Cancel a hotel by its ID.\n", + "parameters:\n", + " - name: hotel_id\n", + " type: integer\n", + " description: The ID of the hotel to cancel.\n", + "statement: UPDATE `{DATASET}.{TABLE_ID}` SET booked = FALSE WHERE id = @hotel_id;\n", + "---\n", + "kind: toolsets\n", + "name: my-toolset\n", "tools:\n", - " search-hotels-by-name:\n", - " kind: bigquery-sql\n", - " source: my-bigquery-source\n", 
- " description: Search for hotels based on name.\n", - " parameters:\n", - " - name: name\n", - " type: string\n", - " description: The name of the hotel.\n", - " statement: SELECT * FROM `{DATASET}.{TABLE_ID}` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%'));\n", - " search-hotels-by-location:\n", - " kind: bigquery-sql\n", - " source: my-bigquery-source\n", - " description: Search for hotels based on location.\n", - " parameters:\n", - " - name: location\n", - " type: string\n", - " description: The location of the hotel.\n", - " statement: SELECT * FROM `{DATASET}.{TABLE_ID}` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%'));\n", - " book-hotel:\n", - " kind: bigquery-sql\n", - " source: my-bigquery-source\n", - " description: >-\n", - " Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n", - " parameters:\n", - " - name: hotel_id\n", - " type: integer\n", - " description: The ID of the hotel to book.\n", - " statement: UPDATE `{DATASET}.{TABLE_ID}` SET booked = TRUE WHERE id = @hotel_id;\n", - " update-hotel:\n", - " kind: bigquery-sql\n", - " source: my-bigquery-source\n", - " description: >-\n", - " Update a hotel's check-in and check-out dates by its ID. 
Returns a message indicating whether the hotel was successfully updated or not.\n", - " parameters:\n", - " - name: checkin_date\n", - " type: string\n", - " description: The new check-in date of the hotel.\n", - " - name: checkout_date\n", - " type: string\n", - " description: The new check-out date of the hotel.\n", - " - name: hotel_id\n", - " type: integer\n", - " description: The ID of the hotel to update.\n", - " statement: >-\n", - " UPDATE `{DATASET}.{TABLE_ID}` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id;\n", - " cancel-hotel:\n", - " kind: bigquery-sql\n", - " source: my-bigquery-source\n", - " description: Cancel a hotel by its ID.\n", - " parameters:\n", - " - name: hotel_id\n", - " type: integer\n", - " description: The ID of the hotel to cancel.\n", - " statement: UPDATE `{DATASET}.{TABLE_ID}` SET booked = FALSE WHERE id = @hotel_id;\n", - "toolsets:\n", - " my-toolset:\n", - " - search-hotels-by-name\n", - " - search-hotels-by-location\n", - " - book-hotel\n", - " - update-hotel\n", - " - cancel-hotel\n", + " - search-hotels-by-name\n", + " - search-hotels-by-location\n", + " - book-hotel\n", + " - update-hotel\n", + " - cancel-hotel\n", "\"\"\"" ] }, diff --git a/docs/en/samples/bigquery/local_quickstart.md b/docs/en/samples/bigquery/local_quickstart.md index 5735a1a515..e8e64cd9ee 100644 --- a/docs/en/samples/bigquery/local_quickstart.md +++ b/docs/en/samples/bigquery/local_quickstart.md @@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server. ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` @@ -201,66 +201,75 @@ to use BigQuery, and then run the Toolbox server. 
{{< /notice >}} ```yaml - sources: - my-bigquery-source: - kind: bigquery - project: YOUR_PROJECT_ID - location: us - tools: - search-hotels-by-name: - kind: bigquery-sql - source: my-bigquery-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. - statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%')); - search-hotels-by-location: - kind: bigquery-sql - source: my-bigquery-source - description: Search for hotels based on location. - parameters: - - name: location - type: string - description: The location of the hotel. - statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%')); - book-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: >- - Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. - parameters: - - name: hotel_id - type: integer - description: The ID of the hotel to book. - statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id; - update-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: >- - Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not. - parameters: - - name: checkin_date - type: string - description: The new check-in date of the hotel. - - name: checkout_date - type: string - description: The new check-out date of the hotel. - - name: hotel_id - type: integer - description: The ID of the hotel to update. - statement: >- - UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id; - cancel-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: Cancel a hotel by its ID. 
- parameters: - - name: hotel_id - type: integer - description: The ID of the hotel to cancel. - statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id; + kind: sources + name: my-bigquery-source + type: bigquery + project: YOUR_PROJECT_ID + location: us + --- + kind: tools + name: search-hotels-by-name + type: bigquery-sql + source: my-bigquery-source + description: Search for hotels based on name. + parameters: + - name: name + type: string + description: The name of the hotel. + statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%')); + --- + kind: tools + name: search-hotels-by-location + type: bigquery-sql + source: my-bigquery-source + description: Search for hotels based on location. + parameters: + - name: location + type: string + description: The location of the hotel. + statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%')); + --- + kind: tools + name: book-hotel + type: bigquery-sql + source: my-bigquery-source + description: >- + Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. + parameters: + - name: hotel_id + type: integer + description: The ID of the hotel to book. + statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id; + --- + kind: tools + name: update-hotel + type: bigquery-sql + source: my-bigquery-source + description: >- + Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not. + parameters: + - name: checkin_date + type: string + description: The new check-in date of the hotel. + - name: checkout_date + type: string + description: The new check-out date of the hotel. + - name: hotel_id + type: integer + description: The ID of the hotel to update. 
+ statement: >- + UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id; + --- + kind: tools + name: cancel-hotel + type: bigquery-sql + source: my-bigquery-source + description: Cancel a hotel by its ID. + parameters: + - name: hotel_id + type: integer + description: The ID of the hotel to cancel. + statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id; ``` **Important Note on `toolsets`**: The `tools.yaml` content above does not @@ -272,8 +281,9 @@ to use BigQuery, and then run the Toolbox server. ```yaml # Add this to your tools.yaml if using load_toolset("my-toolset") # Ensure it's at the same indentation level as 'sources:' and 'tools:' - toolsets: - my-toolset: + kind: toolsets + name: my-toolset + tools: - search-hotels-by-name - search-hotels-by-location - book-hotel @@ -325,7 +335,7 @@ pip install toolbox-llamaindex {{< /tab >}} {{< tab header="ADK" lang="bash" >}} -pip install google-adk +pip install google-adk[toolbox] {{< /tab >}} {{< /tabpane >}} @@ -365,7 +375,7 @@ pip install llama-index-llms-google-genai {{< /tab >}} {{< tab header="ADK" lang="bash" >}} -pip install toolbox-core +# No other dependencies required for ADK {{< /tab >}} {{< /tabpane >}} @@ -607,8 +617,8 @@ from google.adk.agents import Agent from google.adk.runners import Runner from google.adk.sessions import InMemorySessionService from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService +from google.adk.tools.toolbox_toolset import ToolboxToolset from google.genai import types # For constructing message content -from toolbox_core import ToolboxSyncClient import os os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True' @@ -623,48 +633,47 @@ os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1' # --- Load Tools from Toolbox --- -# TODO(developer): Ensure the Toolbox server is running at +# TODO(developer): Ensure the Toolbox 
server is running at http://127.0.0.1:5000 +toolset = ToolboxToolset(server_url="http://127.0.0.1:5000") -with ToolboxSyncClient("") as toolbox_client: - # TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server. - agent_toolset = toolbox_client.load_toolset("my-toolset") +# --- Define the Agent's Prompt --- +prompt = """ + You're a helpful hotel assistant. You handle hotel searching, booking and + cancellations. When the user searches for a hotel, mention it's name, id, + location and price tier. Always mention hotel ids while performing any + searches. This is very important for any operations. For any bookings or + cancellations, please provide the appropriate confirmation. Be sure to + update checkin or checkout dates if mentioned by the user. + Don't ask for confirmations from the user. +""" - # --- Define the Agent's Prompt --- - prompt = """ - You're a helpful hotel assistant. You handle hotel searching, booking and - cancellations. When the user searches for a hotel, mention it's name, id, - location and price tier. Always mention hotel ids while performing any - searches. This is very important for any operations. For any bookings or - cancellations, please provide the appropriate confirmation. Be sure to - update checkin or checkout dates if mentioned by the user. - Don't ask for confirmations from the user. 
- """ +# --- Configure the Agent --- - # --- Configure the Agent --- +root_agent = Agent( + model='gemini-2.0-flash-001', + name='hotel_agent', + description='A helpful AI assistant that can search and book hotels.', + instruction=prompt, + tools=[toolset], # Pass the loaded toolset +) - root_agent = Agent( - model='gemini-2.0-flash-001', - name='hotel_agent', - description='A helpful AI assistant that can search and book hotels.', - instruction=prompt, - tools=agent_toolset, # Pass the loaded toolset - ) +# --- Initialize Services for Running the Agent --- +session_service = InMemorySessionService() +artifacts_service = InMemoryArtifactService() - # --- Initialize Services for Running the Agent --- - session_service = InMemorySessionService() - artifacts_service = InMemoryArtifactService() +runner = Runner( + app_name='hotel_agent', + agent=root_agent, + artifact_service=artifacts_service, + session_service=session_service, +) + +async def main(): # Create a new session for the interaction. 
- session = session_service.create_session( + session = await session_service.create_session( state={}, app_name='hotel_agent', user_id='123' ) - runner = Runner( - app_name='hotel_agent', - agent=root_agent, - artifact_service=artifacts_service, - session_service=session_service, - ) - # --- Define Queries and Run the Agent --- queries = [ "Find hotels in Basel with Basel in it's name.", @@ -687,6 +696,10 @@ with ToolboxSyncClient("") as toolbox_client: for text in responses: print(text) + +import asyncio +if __name__ == "__main__": + asyncio.run(main()) {{< /tab >}} {{< /tabpane >}} diff --git a/docs/en/samples/bigquery/mcp_quickstart/_index.md b/docs/en/samples/bigquery/mcp_quickstart/_index.md index 53250e06fd..3ca10183b1 100644 --- a/docs/en/samples/bigquery/mcp_quickstart/_index.md +++ b/docs/en/samples/bigquery/mcp_quickstart/_index.md @@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` @@ -120,73 +120,84 @@ In this section, we will download Toolbox, configure our tools in a {{< /notice >}} ```yaml - sources: - my-bigquery-source: - kind: bigquery - project: YOUR_PROJECT_ID - location: us + kind: sources + name: my-bigquery-source + type: bigquery + project: YOUR_PROJECT_ID + location: us + --- + kind: tools + name: search-hotels-by-name + type: bigquery-sql + source: my-bigquery-source + description: Search for hotels based on name. + parameters: + - name: name + type: string + description: The name of the hotel. + statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%')); + --- + kind: tools + name: search-hotels-by-location + type: bigquery-sql + source: my-bigquery-source + description: Search for hotels based on location. 
+ parameters: + - name: location + type: string + description: The location of the hotel. + statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%')); + --- + kind: tools + name: book-hotel + type: bigquery-sql + source: my-bigquery-source + description: >- + Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. + parameters: + - name: hotel_id + type: integer + description: The ID of the hotel to book. + statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id; + --- + kind: tools + name: update-hotel + type: bigquery-sql + source: my-bigquery-source + description: >- + Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not. + parameters: + - name: checkin_date + type: string + description: The new check-in date of the hotel. + - name: checkout_date + type: string + description: The new check-out date of the hotel. + - name: hotel_id + type: integer + description: The ID of the hotel to update. + statement: >- + UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id; + --- + kind: tools + name: cancel-hotel + type: bigquery-sql + source: my-bigquery-source + description: Cancel a hotel by its ID. + parameters: + - name: hotel_id + type: integer + description: The ID of the hotel to cancel. + statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id; + --- + kind: toolsets + name: my-toolset tools: - search-hotels-by-name: - kind: bigquery-sql - source: my-bigquery-source - description: Search for hotels based on name. - parameters: - - name: name - type: string - description: The name of the hotel. 
- statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%')); - search-hotels-by-location: - kind: bigquery-sql - source: my-bigquery-source - description: Search for hotels based on location. - parameters: - - name: location - type: string - description: The location of the hotel. - statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%')); - book-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: >- - Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not. - parameters: - - name: hotel_id - type: integer - description: The ID of the hotel to book. - statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id; - update-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: >- - Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not. - parameters: - - name: checkin_date - type: string - description: The new check-in date of the hotel. - - name: checkout_date - type: string - description: The new check-out date of the hotel. - - name: hotel_id - type: integer - description: The ID of the hotel to update. - statement: >- - UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id; - cancel-hotel: - kind: bigquery-sql - source: my-bigquery-source - description: Cancel a hotel by its ID. - parameters: - - name: hotel_id - type: integer - description: The ID of the hotel to cancel. 
- statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id; - toolsets: - my-toolset: - - search-hotels-by-name - - search-hotels-by-location - - book-hotel - - update-hotel - - cancel-hotel + - search-hotels-by-name + - search-hotels-by-location + - book-hotel + - update-hotel + - cancel-hotel ``` For more info on tools, check out the diff --git a/docs/en/samples/looker/looker_gemini.md b/docs/en/samples/looker/looker_gemini.md index 33ed1fe580..70bde9465a 100644 --- a/docs/en/samples/looker/looker_gemini.md +++ b/docs/en/samples/looker/looker_gemini.md @@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server. ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` diff --git a/docs/en/samples/looker/looker_gemini_oauth/_index.md b/docs/en/samples/looker/looker_gemini_oauth/_index.md index b9e224a1a1..5e7b574c55 100644 --- a/docs/en/samples/looker/looker_gemini_oauth/_index.md +++ b/docs/en/samples/looker/looker_gemini_oauth/_index.md @@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server. ```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` diff --git a/docs/en/samples/looker/looker_mcp_inspector/_index.md b/docs/en/samples/looker/looker_mcp_inspector/_index.md index ca0de51f99..56338b3576 100644 --- a/docs/en/samples/looker/looker_mcp_inspector/_index.md +++ b/docs/en/samples/looker/looker_mcp_inspector/_index.md @@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server. 
```bash export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64 - curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox + curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox ``` diff --git a/docs/en/samples/neo4j/mcp_quickstart.md b/docs/en/samples/neo4j/mcp_quickstart.md index fa36ac6ce3..ebcb70a70e 100644 --- a/docs/en/samples/neo4j/mcp_quickstart.md +++ b/docs/en/samples/neo4j/mcp_quickstart.md @@ -61,37 +61,38 @@ Write the following into a `tools.yaml` file: \+ ```yaml -sources: - my-neo4j-source: - kind: neo4j - uri: bolt://localhost:7687 - user: neo4j - password: my-password # Replace with your actual password - -tools: - search-movies-by-actor: - kind: neo4j-cypher - source: my-neo4j-source - description: "Searches for movies an actor has appeared in based on their name. Useful for questions like 'What movies has Tom Hanks been in?'" - parameters: - - name: actor_name - type: string - description: The full name of the actor to search for. - statement: | - MATCH (p:Person {name: $actor_name}) -[:ACTED_IN]-> (m:Movie) - RETURN m.title AS title, m.year AS year, m.genre AS genre - - get-actor-for-movie: - kind: neo4j-cypher - source: my-neo4j-source - description: "Finds the actors who starred in a specific movie. Useful for questions like 'Who acted in Inception?'" - parameters: - - name: movie_title - type: string - description: The exact title of the movie. - statement: | - MATCH (p:Person) -[:ACTED_IN]-> (m:Movie {title: $movie_title}) - RETURN p.name AS actor +kind: sources +name: my-neo4j-source +type: neo4j +uri: bolt://localhost:7687 +user: neo4j +password: my-password # Replace with your actual password +--- +kind: tools +name: search-movies-by-actor +type: neo4j-cypher +source: my-neo4j-source +description: "Searches for movies an actor has appeared in based on their name. 
Useful for questions like 'What movies has Tom Hanks been in?'" +parameters: + - name: actor_name + type: string + description: The full name of the actor to search for. +statement: | + MATCH (p:Person {name: $actor_name}) -[:ACTED_IN]-> (m:Movie) + RETURN m.title AS title, m.year AS year, m.genre AS genre +--- +kind: tools +name: get-actor-for-movie +type: neo4j-cypher +source: my-neo4j-source +description: "Finds the actors who starred in a specific movie. Useful for questions like 'Who acted in Inception?'" +parameters: + - name: movie_title + type: string + description: The exact title of the movie. +statement: | + MATCH (p:Person) -[:ACTED_IN]-> (m:Movie {title: $movie_title}) + RETURN p.name AS actor ``` . **Start the Toolbox server.** diff --git a/docs/en/samples/pre_post_processing/_index.md b/docs/en/samples/pre_post_processing/_index.md new file mode 100644 index 0000000000..6fcf570027 --- /dev/null +++ b/docs/en/samples/pre_post_processing/_index.md @@ -0,0 +1,54 @@ +--- +title: "Pre- and Post- Processing" +type: docs +weight: 1 +description: > + Intercept and modify interactions between the agent and its tools either before or after a tool is executed. +--- + +Pre- and post- processing allow developers to intercept and modify interactions between the agent and its tools or the user. + +{{< notice note >}} + +These capabilities are typically features of **orchestration frameworks** (like LangChain, LangGraph, or Agent Builder) rather than the Toolbox SDK itself. However, Toolbox tools are designed to fully leverage these framework capabilities to support robust, secure, and compliant agent architectures. + +{{< /notice >}} + +## Types of Processing + +### Pre-processing + +Pre-processing occurs before a tool is executed or an agent processes a message. 
Key types include: + +- **Input Sanitization & Redaction**: Detecting and masking sensitive information (like PII) in user queries or tool arguments to prevent it from being logged or sent to unauthorized systems. +- **Business Logic Validation**: Verifying that the proposed action complies with business rules (e.g., ensuring a requested hotel stay does not exceed 14 days, or checking if a user has sufficient permission). +- **Security Guardrails**: Analyzing inputs for potential prompt injection attacks or malicious payloads. + +### Post-processing + +Post-processing occurs after a tool has executed or the model has generated a response. Key types include: + +- **Response Enrichment**: Injecting additional data into the tool output that wasn't part of the raw API response (e.g., calculating loyalty points earned based on the booking value). +- **Output Formatting**: Transforming raw data (like JSON or XML) into a more human-readable or model-friendly format to improve the agent's understanding. +- **Compliance Auditing**: Logging the final outcome of transactions, including the original request and the result, to a secure audit trail. + +## Processing Scopes + +While processing logic can be applied at various levels (Agent, Model, Tool), this guide primarily focuses on **Tool Level** processing, which is most relevant for granular control over tool execution. + +### Tool Level (Primary Focus) + +Wraps individual tool executions. This is best for logic specific to a single tool or a set of tools. + +- **Scope**: Intercepts the raw inputs (arguments) to a tool and its outputs. +- **Use Cases**: Argument validation, output formatting, specific privacy rules for sensitive tools. + +### Other Levels + +It is helpful to understand how tool-level processing differs from other scopes: + +- **Model Level**: Intercepts individual calls to the LLM (prompts and responses). 
Unlike tool-level, this applies globally to all text sent/received, making it better for global PII redaction or token tracking. +- **Agent Level**: Wraps the high-level execution loop (e.g., a "turn" in the conversation). Unlike tool-level, this envelopes the entire turn (user input to final response), making it suitable for session management or end-to-end auditing. + + +## Samples diff --git a/docs/en/samples/pre_post_processing/python.md b/docs/en/samples/pre_post_processing/python.md new file mode 100644 index 0000000000..1c4311f487 --- /dev/null +++ b/docs/en/samples/pre_post_processing/python.md @@ -0,0 +1,40 @@ +--- +title: "Python" +type: docs +weight: 1 +description: > + How to add pre- and post- processing to your Agents using Python. +--- + +## Prerequisites + +This tutorial assumes that you have set up Toolbox with a basic agent as described in the [local quickstart](../../getting-started/local_quickstart.md). + +This guide demonstrates how to implement these patterns in your Toolbox applications. + +## Implementation + +{{< tabpane persist=header >}} +{{% tab header="ADK" text=true %}} +Coming soon. +{{% /tab %}} +{{% tab header="Langchain" text=true %}} +The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre- and post- processing for tool calls. + +```py +{{< include "python/langchain/agent.py" >}} +``` + +You can also add model-level (`wrap_model`) and agent-level (`before_agent`, `after_agent`) hooks to intercept messages at different stages of the execution loop. See the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks) for details on these additional hook types. +{{% /tab %}} +{{< /tabpane >}} + +## Results + +The output should look similar to the following. Note that exact responses may vary due to the non-deterministic nature of LLMs and differences between orchestration frameworks. + +``` +AI: Booking Confirmed! 
You earned 500 Loyalty Points with this stay. + +AI: Error: Maximum stay duration is 14 days. +``` diff --git a/docs/en/samples/pre_post_processing/python/__init__.py b/docs/en/samples/pre_post_processing/python/__init__.py new file mode 100644 index 0000000000..f5b7c1bfd2 --- /dev/null +++ b/docs/en/samples/pre_post_processing/python/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# This file makes the 'pre_post_processing/python' directory a Python package. + +# You can include any package-level initialization logic here if needed. +# For now, this file is empty. diff --git a/docs/en/samples/pre_post_processing/python/agent_test.py b/docs/en/samples/pre_post_processing/python/agent_test.py new file mode 100644 index 0000000000..36c5b8e27d --- /dev/null +++ b/docs/en/samples/pre_post_processing/python/agent_test.py @@ -0,0 +1,51 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import asyncio +import importlib +import os +from pathlib import Path + +import pytest + +ORCH_NAME = os.environ.get("ORCH_NAME") +module_path = f"python.{ORCH_NAME}.agent" +agent = importlib.import_module(module_path) + +GOLDEN_KEYWORDS = [ + "AI:", + "Loyalty Points", + "POLICY CHECK: Intercepting 'update-hotel'", +] + +# --- Execution Tests --- +class TestExecution: + """Test framework execution and output validation.""" + + @pytest.fixture(scope="function") + def script_output(self, capsys): + """Run the agent function and return its output.""" + asyncio.run(agent.main()) + return capsys.readouterr() + + def test_script_runs_without_errors(self, script_output): + """Test that the script runs and produces no stderr.""" + assert script_output.err == "", f"Script produced stderr: {script_output.err}" + + def test_keywords_in_output(self, script_output): + """Test that expected keywords are present in the script's output.""" + output = script_output.out + print(f"\nAgent Output:\n{output}\n") + missing_keywords = [kw for kw in GOLDEN_KEYWORDS if kw not in output] + assert not missing_keywords, f"Missing keywords in output: {missing_keywords}" diff --git a/docs/en/samples/pre_post_processing/python/langchain/agent.py b/docs/en/samples/pre_post_processing/python/langchain/agent.py new file mode 100644 index 0000000000..5e174943a7 --- /dev/null +++ b/docs/en/samples/pre_post_processing/python/langchain/agent.py @@ -0,0 +1,116 @@ +import asyncio +from datetime import datetime + +from langchain.agents import create_agent +from langchain.agents.middleware import wrap_tool_call +from langchain_core.messages import ToolMessage +from langchain_google_vertexai import ChatVertexAI +from toolbox_langchain import ToolboxClient + +system_prompt = """ + You're a helpful hotel assistant. You handle hotel searching, booking and + cancellations. When the user searches for a hotel, mention it's name, id, + location and price tier. 
Always mention hotel ids while performing any + searches. This is very important for any operations. For any bookings or + cancellations, please provide the appropriate confirmation. Be sure to + update checkin or checkout dates if mentioned by the user. + Don't ask for confirmations from the user. +""" + + +# Pre processing +@wrap_tool_call +async def enforce_business_rules(request, handler): + """ + Business Logic Validation: + Enforces max stay duration (e.g., max 14 days). + """ + tool_call = request.tool_call + name = tool_call["name"] + args = tool_call["args"] + + print(f"POLICY CHECK: Intercepting '{name}'") + + if name == "update-hotel": + if "checkin_date" in args and "checkout_date" in args: + try: + start = datetime.fromisoformat(args["checkin_date"]) + end = datetime.fromisoformat(args["checkout_date"]) + duration = (end - start).days + + if duration > 14: + print("BLOCKED: Stay too long") + return ToolMessage( + content="Error: Maximum stay duration is 14 days.", + tool_call_id=tool_call["id"], + ) + except ValueError: + pass # Ignore invalid date formats + + # PRE: Code here runs BEFORE the tool execution + + # EXEC: Execute the tool (or next middleware) + result = await handler(request) + + # POST: Code here runs AFTER the tool execution + return result + + +# Post processing +@wrap_tool_call +async def enrich_response(request, handler): + """ + Post-Processing & Enrichment: + Adds loyalty points information to successful bookings. + Standardizes output format. 
+ """ + # PRE: Code here runs BEFORE the tool execution + + # EXEC: Execute the tool (or next middleware) + result = await handler(request) + + # POST: Code here runs AFTER the tool execution + if isinstance(result, ToolMessage): + content = str(result.content) + tool_name = request.tool_call["name"] + + if tool_name == "book-hotel" and "Error" not in content: + loyalty_bonus = 500 + result.content = f"Booking Confirmed!\n You earned {loyalty_bonus} Loyalty Points with this stay.\n\nSystem Details: {content}" + + return result + + +async def main(): + async with ToolboxClient("http://127.0.0.1:5000") as client: + tools = await client.aload_toolset("my-toolset") + model = ChatVertexAI(model="gemini-2.5-flash") + agent = create_agent( + system_prompt=system_prompt, + model=model, + tools=tools, + # add any pre and post processing methods + middleware=[enforce_business_rules, enrich_response], + ) + + user_input = "Book hotel with id 3." + response = await agent.ainvoke( + {"messages": [{"role": "user", "content": user_input}]} + ) + + print("-" * 50) + last_ai_msg = response["messages"][-1].content + print(f"AI: {last_ai_msg}") + + # Test Pre-processing + print("-" * 50) + user_input = "Update my hotel with id 3 with checkin date 2025-01-18 and checkout date 2025-01-20" + response = await agent.ainvoke( + {"messages": [{"role": "user", "content": user_input}]} + ) + last_ai_msg = response["messages"][-1].content + print(f"AI: {last_ai_msg}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/docs/en/samples/pre_post_processing/python/langchain/requirements.txt b/docs/en/samples/pre_post_processing/python/langchain/requirements.txt new file mode 100644 index 0000000000..5638e0c108 --- /dev/null +++ b/docs/en/samples/pre_post_processing/python/langchain/requirements.txt @@ -0,0 +1,3 @@ +langchain==1.2.6 +langchain-google-vertexai==3.2.2 +toolbox-langchain==0.5.8 \ No newline at end of file diff --git a/docs/en/samples/snowflake/_index.md 
b/docs/en/samples/snowflake/_index.md index 0149544eba..6f3ca05e8d 100644 --- a/docs/en/samples/snowflake/_index.md +++ b/docs/en/samples/snowflake/_index.md @@ -79,32 +79,33 @@ You have two options: Create a `tools.yaml` file and add the following content. You must replace the placeholders with your actual Snowflake configuration. ```yaml -sources: - snowflake-source: - kind: snowflake - account: ${SNOWFLAKE_ACCOUNT} - user: ${SNOWFLAKE_USER} - password: ${SNOWFLAKE_PASSWORD} - database: ${SNOWFLAKE_DATABASE} - schema: ${SNOWFLAKE_SCHEMA} - warehouse: ${SNOWFLAKE_WAREHOUSE} - role: ${SNOWFLAKE_ROLE} - -tools: - execute_sql: - kind: snowflake-execute-sql - source: snowflake-source - description: Use this tool to execute SQL. - - list_tables: - kind: snowflake-sql - source: snowflake-source - description: "Lists detailed schema information for user-created tables." - statement: | - SELECT table_name, table_type - FROM information_schema.tables - WHERE table_schema = current_schema() - ORDER BY table_name; +kind: sources +name: snowflake-source +type: snowflake +account: ${SNOWFLAKE_ACCOUNT} +user: ${SNOWFLAKE_USER} +password: ${SNOWFLAKE_PASSWORD} +database: ${SNOWFLAKE_DATABASE} +schema: ${SNOWFLAKE_SCHEMA} +warehouse: ${SNOWFLAKE_WAREHOUSE} +role: ${SNOWFLAKE_ROLE} +--- +kind: tools +name: execute_sql +type: snowflake-execute-sql +source: snowflake-source +description: Use this tool to execute SQL. +--- +kind: tools +name: list_tables +type: snowflake-sql +source: snowflake-source +description: "Lists detailed schema information for user-created tables." 
+statement: | + SELECT table_name, table_type + FROM information_schema.tables + WHERE table_schema = current_schema() + ORDER BY table_name; ``` For more info on tools, check out the diff --git a/docs/en/samples/snowflake/snowflake-config.yaml b/docs/en/samples/snowflake/snowflake-config.yaml index 085433e346..29660de2bf 100644 --- a/docs/en/samples/snowflake/snowflake-config.yaml +++ b/docs/en/samples/snowflake/snowflake-config.yaml @@ -12,57 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. -sources: - my-snowflake-db: - kind: snowflake - account: ${SNOWFLAKE_ACCOUNT} - user: ${SNOWFLAKE_USER} - password: ${SNOWFLAKE_PASSWORD} - database: ${SNOWFLAKE_DATABASE} - schema: ${SNOWFLAKE_SCHEMA} - warehouse: ${SNOWFLAKE_WAREHOUSE} # Optional, defaults to COMPUTE_WH if not set - role: ${SNOWFLAKE_ROLE} # Optional, defaults to ACCOUNTADMIN if not set - +kind: sources +name: my-snowflake-db +type: snowflake +account: ${SNOWFLAKE_ACCOUNT} +user: ${SNOWFLAKE_USER} +password: ${SNOWFLAKE_PASSWORD} +database: ${SNOWFLAKE_DATABASE} +schema: ${SNOWFLAKE_SCHEMA} +warehouse: ${SNOWFLAKE_WAREHOUSE} # Optional, defaults to COMPUTE_WH if not set +role: ${SNOWFLAKE_ROLE} # Optional, defaults to ACCOUNTADMIN if not set +--- +kind: tools +name: execute_sql +type: snowflake-execute-sql +source: my-snowflake-db +description: Execute arbitrary SQL statements on Snowflake +--- +kind: tools +name: get_customer_orders +type: snowflake-sql +source: my-snowflake-db +description: Get orders for a specific customer +statement: | + SELECT o.order_id, o.order_date, o.total_amount, o.status + FROM orders o + WHERE o.customer_id = $1 + ORDER BY o.order_date DESC +parameters: + - name: customer_id + type: string + description: The customer ID to look up orders for +--- +kind: tools +name: daily_sales_report +type: snowflake-sql +source: my-snowflake-db +description: Generate daily sales report for a specific date +statement: | + SELECT + 
DATE(order_date) as sales_date, + COUNT(*) as total_orders, + SUM(total_amount) as total_revenue, + AVG(total_amount) as avg_order_value + FROM orders + WHERE DATE(order_date) = $1 + GROUP BY DATE(order_date) +parameters: + - name: report_date + type: string + description: The date to generate report for (YYYY-MM-DD format) +--- +kind: toolsets +name: snowflake-analytics tools: - execute_sql: - kind: snowflake-execute-sql - source: my-snowflake-db - description: Execute arbitrary SQL statements on Snowflake - - get_customer_orders: - kind: snowflake-sql - source: my-snowflake-db - description: Get orders for a specific customer - statement: | - SELECT o.order_id, o.order_date, o.total_amount, o.status - FROM orders o - WHERE o.customer_id = $1 - ORDER BY o.order_date DESC - parameters: - - name: customer_id - type: string - description: The customer ID to look up orders for - - daily_sales_report: - kind: snowflake-sql - source: my-snowflake-db - description: Generate daily sales report for a specific date - statement: | - SELECT - DATE(order_date) as sales_date, - COUNT(*) as total_orders, - SUM(total_amount) as total_revenue, - AVG(total_amount) as avg_order_value - FROM orders - WHERE DATE(order_date) = $1 - GROUP BY DATE(order_date) - parameters: - - name: report_date - type: string - description: The date to generate report for (YYYY-MM-DD format) - -toolsets: - snowflake-analytics: - - execute_sql - - get_customer_orders - - daily_sales_report + - execute_sql + - get_customer_orders + - daily_sales_report diff --git a/gemini-extension.json b/gemini-extension.json index 7e6a846b15..4a5c0e0b8d 100644 --- a/gemini-extension.json +++ b/gemini-extension.json @@ -1,6 +1,6 @@ { "name": "mcp-toolbox-for-databases", - "version": "0.26.0", + "version": "0.27.0", "description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.", "contextFileName": "MCP-TOOLBOX-EXTENSION.md" } \ No newline at end of file diff --git a/go.mod 
b/go.mod index 2254089cee..9fd4617e1b 100644 --- a/go.mod +++ b/go.mod @@ -13,6 +13,7 @@ require ( cloud.google.com/go/dataproc/v2 v2.15.0 cloud.google.com/go/firestore v1.20.0 cloud.google.com/go/geminidataanalytics v0.3.0 + cloud.google.com/go/logging v1.13.1 cloud.google.com/go/longrunning v0.7.0 cloud.google.com/go/spanner v1.86.1 github.com/ClickHouse/clickhouse-go/v2 v2.40.3 @@ -20,6 +21,7 @@ require ( github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.30.0 github.com/apache/cassandra-gocql-driver/v2 v2.0.0 github.com/cenkalti/backoff/v5 v5.0.3 + github.com/cockroachdb/cockroach-go/v2 v2.4.2 github.com/couchbase/gocb/v2 v2.11.1 github.com/couchbase/tools-common/http v1.0.9 github.com/elastic/elastic-transport-go/v8 v8.8.0 @@ -46,11 +48,12 @@ require ( github.com/sijms/go-ora/v2 v2.9.0 github.com/snowflakedb/gosnowflake v1.18.1 github.com/spf13/cobra v1.10.1 + github.com/testcontainers/testcontainers-go v0.40.0 github.com/thlib/go-timezone-local v0.0.7 github.com/trinodb/trino-go-client v0.330.0 github.com/valkey-io/valkey-go v1.0.68 github.com/yugabyte/pgx/v5 v5.5.3-yb-5 - go.mongodb.org/mongo-driver v1.17.4 + go.mongodb.org/mongo-driver/v2 v2.4.2 go.opentelemetry.io/contrib/propagators/autoprop v0.62.0 go.opentelemetry.io/otel v1.38.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 @@ -90,16 +93,19 @@ require ( cloud.google.com/go/iam v1.5.3 // indirect cloud.google.com/go/monitoring v1.24.3 // indirect cloud.google.com/go/trace v1.11.7 // indirect + dario.cat/mergo v1.0.2 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/keyring v1.2.2 // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect + github.com/Azure/go-ansiterm 
v0.0.0-20250102033503-faa5f7b0171c // indirect github.com/BurntSushi/toml v1.4.0 // indirect github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/PuerkitoBio/goquery v1.10.3 // indirect github.com/VictoriaMetrics/easyproto v0.1.4 // indirect github.com/ajg/form v1.5.1 // indirect @@ -125,17 +131,29 @@ require ( github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect github.com/aws/smithy-go v1.23.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect github.com/couchbase/gocbcore/v10 v10.8.1 // indirect github.com/couchbase/gocbcoreps v0.1.4 // indirect github.com/couchbase/goprotostellar v1.0.2 // indirect github.com/couchbase/tools-common/errors v1.0.0 // indirect github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect github.com/danieljoos/wincred v1.2.2 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/docker/docker v28.5.1+incompatible // indirect + github.com/docker/go-connections v0.6.0 // indirect + github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/dvsekhvalnov/jose2go v1.7.0 // indirect + 
github.com/ebitengine/purego v0.8.4 // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -144,6 +162,7 @@ require ( github.com/go-logfmt/logfmt v0.6.0 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/goccy/go-json v0.10.5 // indirect @@ -179,27 +198,46 @@ require ( github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.2.11 // indirect github.com/leodido/go-urn v1.4.0 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/magiconair/properties v1.8.10 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/montanaflynn/stats v0.7.1 // indirect + github.com/morikuni/aec v1.0.0 // indirect github.com/mtibben/percent v0.2.1 // indirect github.com/nakagami/chacha20 v0.1.0 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect github.com/pierrec/lz4 v2.6.1+incompatible // indirect 
github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect + github.com/shirou/gopsutil/v4 v4.25.6 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/spf13/pflag v1.0.9 // indirect github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect + github.com/stretchr/testify v1.11.1 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/zeebo/errs v1.4.0 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b // indirect @@ -232,6 +270,7 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/libc v1.66.10 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.11.0 // indirect diff --git a/go.sum b/go.sum index 791f5b2457..8ebebe11d5 100644 --- a/go.sum +++ b/go.sum @@ -370,8 +370,8 @@ cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6 cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw= cloud.google.com/go/logging 
v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= -cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= -cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= +cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY= +cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw= cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE= cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc= cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= @@ -647,6 +647,8 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4= @@ -798,8 +800,18 @@ github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= 
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cockroachdb/cockroach-go/v2 v2.4.2 h1:QB0ozDWQUUJ0GP8Zw63X/qHefPTCpLvtfCs6TLrPgyE= +github.com/cockroachdb/cockroach-go/v2 v2.4.2/go.mod h1:9U179XbCx4qFWtNhc7BiWLPfuyMVQ7qdAhfrwLz1vH0= github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= github.com/couchbase/gocb/v2 v2.11.1 h1:xWDco7Qk/XSvGUjbUWRaXi0V35nsMijJnm4vHXN/rqY= github.com/couchbase/gocb/v2 v2.11.1/go.mod h1:aSh1Cmd1sPRpYyiBD5iWPehPWaTVF/oYhrtOAITWb/4= github.com/couchbase/gocbcore/v10 v10.8.1 h1:i4SnH0DH9APGC4GS2vS2m+3u08V7oJwviamOXdgAZOQ= @@ -816,8 +828,12 @@ github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8 h1:MQ github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8/go.mod h1:AVekAZwIY2stsJOMWLAS/0uA/+qdp7pjO8EHnl61QkY= github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8d5hnglCXXjW78= github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= 
+github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0= github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -826,10 +842,12 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/docker/cli v28.4.0+incompatible h1:RBcf3Kjw2pMtwui5V0DIMdyeab8glEw5QY0UUU4C9kY= github.com/docker/cli v28.4.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/docker v28.4.0+incompatible h1:KVC7bz5zJY/4AZe/78BIvCnPsLaC9T/zh72xnlrTTOk= -github.com/docker/docker v28.4.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM= +github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.6.0 
h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94= github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= @@ -840,6 +858,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo= github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo= github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk= github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs= @@ -913,6 +933,8 @@ github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= @@ -940,6 +962,8 @@ github.com/godror/godror v0.49.6 h1:ts4ZGw8uLJ42e1D7aXmVuSrld0/lzUzmIUjuUuQOgGM= github.com/godror/godror v0.49.6/go.mod 
h1:kTMcxZzRw73RT5kn9v3JkBK4kHI6dqowHotqV72ebU8= github.com/godror/knownpb v0.3.0 h1:+caUdy8hTtl7X05aPl3tdL540TvCcaQA6woZQroLZMw= github.com/godror/knownpb v0.3.0/go.mod h1:PpTyfJwiOEAzQl7NtVCM8kdPCnp3uhxsZYIzZ5PV4zU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= @@ -1174,9 +1198,13 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/looker-open-source/sdk-codegen/go v0.25.22 h1:DGYt1v2R2uE/m71sWAvgxsJnDLM9B7C40N5/CTDlE2A= github.com/looker-open-source/sdk-codegen/go v0.25.22/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.12/go.mod 
h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= @@ -1194,8 +1222,18 @@ github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8D github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -1204,8 +1242,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= 
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= -github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo= @@ -1254,6 +1292,8 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= @@ -1275,6 +1315,8 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod 
h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys= github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= +github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= +github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg= @@ -1312,9 +1354,15 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU= +github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY= github.com/thlib/go-timezone-local v0.0.7 h1:fX8zd3aJydqLlTs/TrROrIIdztzsdFV23OzOQx31jII= github.com/thlib/go-timezone-local v0.0.7/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/trinodb/trino-go-client v0.330.0 h1:TBbHjFBuRjYbGtkNyRAJfzLOcwvz8ECihtMtxSzXqOc= github.com/trinodb/trino-go-client 
v0.330.0/go.mod h1:BXj9QNy6pA4Gn8eIu9dVdRhetABCjFAOZ6xxsVsOZJE= github.com/valkey-io/valkey-go v1.0.68 h1:bTbfonp49b41DqrF30q+y2JL3gcbjd2IiacFAtO4JBA= @@ -1347,6 +1395,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= @@ -1356,8 +1406,8 @@ github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaD gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b h1:7gd+rd8P3bqcn/96gOZa3F5dpJr/vEiDQYlNb/y2uNs= gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= -go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= -go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +go.mongodb.org/mongo-driver/v2 v2.4.2 h1:HrJ+Auygxceby9MLp3YITobef5a8Bv4HcPFIkml1U7U= +go.mongodb.org/mongo-driver/v2 v2.4.2/go.mod h1:jHeEDJHJq7tm6ZF45Issun9dbogjfnPySb1vXA7EeAI= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -1627,6 +1677,7 @@ golang.org/x/sys 
v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1648,6 +1699,7 @@ golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1700,6 +1752,7 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= @@ -2129,6 +2182,8 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/internal/auth/auth.go b/internal/auth/auth.go index e8f15e4f11..5fd30d1f45 100644 --- a/internal/auth/auth.go +++ b/internal/auth/auth.go @@ -21,13 +21,13 @@ import ( // AuthServiceConfig is the interface for configuring authentication services. type AuthServiceConfig interface { - AuthServiceConfigKind() string + AuthServiceConfigType() string Initialize() (AuthService, error) } // AuthService is the interface for authentication services. 
type AuthService interface { - AuthServiceKind() string + AuthServiceType() string GetName() string GetClaimsFromHeader(context.Context, http.Header) (map[string]any, error) ToConfig() AuthServiceConfig diff --git a/internal/auth/google/google.go b/internal/auth/google/google.go index 90cb809589..4027950557 100644 --- a/internal/auth/google/google.go +++ b/internal/auth/google/google.go @@ -23,7 +23,7 @@ import ( "google.golang.org/api/idtoken" ) -const AuthServiceKind string = "google" +const AuthServiceType string = "google" // validate interface var _ auth.AuthServiceConfig = Config{} @@ -31,13 +31,13 @@ var _ auth.AuthServiceConfig = Config{} // Auth service configuration type Config struct { Name string `yaml:"name" validate:"required"` - Kind string `yaml:"kind" validate:"required"` + Type string `yaml:"type" validate:"required"` ClientID string `yaml:"clientId" validate:"required"` } -// Returns the auth service kind -func (cfg Config) AuthServiceConfigKind() string { - return AuthServiceKind +// Returns the auth service type +func (cfg Config) AuthServiceConfigType() string { + return AuthServiceType } // Initialize a Google auth service @@ -55,9 +55,9 @@ type AuthService struct { Config } -// Returns the auth service kind -func (a AuthService) AuthServiceKind() string { - return AuthServiceKind +// Returns the auth service type +func (a AuthService) AuthServiceType() string { + return AuthServiceType } func (a AuthService) ToConfig() auth.AuthServiceConfig { diff --git a/internal/embeddingmodels/embeddingmodels.go b/internal/embeddingmodels/embeddingmodels.go index d038dd231e..5f82ee32f1 100644 --- a/internal/embeddingmodels/embeddingmodels.go +++ b/internal/embeddingmodels/embeddingmodels.go @@ -22,12 +22,12 @@ import ( // EmbeddingModelConfig is the interface for configuring embedding models. 
type EmbeddingModelConfig interface { - EmbeddingModelConfigKind() string + EmbeddingModelConfigType() string Initialize(context.Context) (EmbeddingModel, error) } type EmbeddingModel interface { - EmbeddingModelKind() string + EmbeddingModelType() string ToConfig() EmbeddingModelConfig EmbedParameters(context.Context, []string) ([][]float32, error) } diff --git a/internal/embeddingmodels/gemini/gemini.go b/internal/embeddingmodels/gemini/gemini.go index 1a31060e7b..fa63b12acb 100644 --- a/internal/embeddingmodels/gemini/gemini.go +++ b/internal/embeddingmodels/gemini/gemini.go @@ -23,22 +23,22 @@ import ( "google.golang.org/genai" ) -const EmbeddingModelKind string = "gemini" +const EmbeddingModelType string = "gemini" // validate interface var _ embeddingmodels.EmbeddingModelConfig = Config{} type Config struct { Name string `yaml:"name" validate:"required"` - Kind string `yaml:"kind" validate:"required"` + Type string `yaml:"type" validate:"required"` Model string `yaml:"model" validate:"required"` ApiKey string `yaml:"apiKey"` Dimension int32 `yaml:"dimension"` } -// Returns the embedding model kind -func (cfg Config) EmbeddingModelConfigKind() string { - return EmbeddingModelKind +// Returns the embedding model type +func (cfg Config) EmbeddingModelConfigType() string { + return EmbeddingModelType } // Initialize a Gemini embedding model @@ -69,9 +69,9 @@ type EmbeddingModel struct { Config } -// Returns the embedding model kind -func (m EmbeddingModel) EmbeddingModelKind() string { - return EmbeddingModelKind +// Returns the embedding model type +func (m EmbeddingModel) EmbeddingModelType() string { + return EmbeddingModelType } func (m EmbeddingModel) ToConfig() embeddingmodels.EmbeddingModelConfig { diff --git a/internal/embeddingmodels/gemini/gemini_test.go b/internal/embeddingmodels/gemini/gemini_test.go index c4c3b8dcdc..e04e1df1b6 100644 --- a/internal/embeddingmodels/gemini/gemini_test.go +++ b/internal/embeddingmodels/gemini/gemini_test.go @@ -15,9 
+15,9 @@ package gemini_test import ( + "context" "testing" - yaml "github.com/goccy/go-yaml" "github.com/google/go-cmp/cmp" "github.com/googleapis/genai-toolbox/internal/embeddingmodels" "github.com/googleapis/genai-toolbox/internal/embeddingmodels/gemini" @@ -34,15 +34,15 @@ func TestParseFromYamlGemini(t *testing.T) { { desc: "basic example", in: ` - embeddingModels: - my-gemini-model: - kind: gemini - model: text-embedding-004 + kind: embeddingModels + name: my-gemini-model + type: gemini + model: text-embedding-004 `, want: map[string]embeddingmodels.EmbeddingModelConfig{ "my-gemini-model": gemini.Config{ Name: "my-gemini-model", - Kind: gemini.EmbeddingModelKind, + Type: gemini.EmbeddingModelType, Model: "text-embedding-004", }, }, @@ -50,17 +50,17 @@ func TestParseFromYamlGemini(t *testing.T) { { desc: "full example with optional fields", in: ` - embeddingModels: - complex-gemini: - kind: gemini - model: text-embedding-004 - apiKey: "test-api-key" - dimension: 768 + kind: embeddingModels + name: complex-gemini + type: gemini + model: text-embedding-004 + apiKey: "test-api-key" + dimension: 768 `, want: map[string]embeddingmodels.EmbeddingModelConfig{ "complex-gemini": gemini.Config{ Name: "complex-gemini", - Kind: gemini.EmbeddingModelKind, + Type: gemini.EmbeddingModelType, Model: "text-embedding-004", ApiKey: "test-api-key", Dimension: 768, @@ -70,16 +70,13 @@ func TestParseFromYamlGemini(t *testing.T) { } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - got := struct { - Models server.EmbeddingModelConfigs `yaml:"embeddingModels"` - }{} // Parse contents - err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got) + _, _, got, _, _, _, err := server.UnmarshalResourceConfig(context.Background(), testutils.FormatYaml(tc.in)) if err != nil { t.Fatalf("unable to unmarshal: %s", err) } - if !cmp.Equal(tc.want, got.Models) { - t.Fatalf("incorrect parse: %v", cmp.Diff(tc.want, got.Models)) + if !cmp.Equal(tc.want, got) { + t.Fatalf("incorrect parse: 
%v", cmp.Diff(tc.want, got)) } }) } @@ -93,32 +90,29 @@ func TestFailParseFromYamlGemini(t *testing.T) { { desc: "missing required model field", in: ` - embeddingModels: - bad-model: - kind: gemini + kind: embeddingModels + name: bad-model + type: gemini `, // Removed the specific model name from the prefix to match your output - err: "unable to parse as \"gemini\": Key: 'Config.Model' Error:Field validation for 'Model' failed on the 'required' tag", + err: "error unmarshaling embeddingModels: unable to parse as \"bad-model\": Key: 'Config.Model' Error:Field validation for 'Model' failed on the 'required' tag", }, { desc: "unknown field", in: ` - embeddingModels: - bad-field: - kind: gemini - model: text-embedding-004 - invalid_param: true + kind: embeddingModels + name: bad-field + type: gemini + model: text-embedding-004 + invalid_param: true `, // Updated to match the specific line-starting format of your error output - err: "unable to parse as \"gemini\": [1:1] unknown field \"invalid_param\"\n> 1 | invalid_param: true\n ^\n 2 | kind: gemini\n 3 | model: text-embedding-004", + err: "error unmarshaling embeddingModels: unable to parse as \"bad-field\": [1:1] unknown field \"invalid_param\"\n> 1 | invalid_param: true\n ^\n 2 | model: text-embedding-004\n 3 | name: bad-field\n 4 | type: gemini", }, } for _, tc := range tcs { t.Run(tc.desc, func(t *testing.T) { - got := struct { - Models server.EmbeddingModelConfigs `yaml:"embeddingModels"` - }{} - err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got) + _, _, _, _, _, _, err := server.UnmarshalResourceConfig(context.Background(), testutils.FormatYaml(tc.in)) if err == nil { t.Fatalf("expect parsing to fail") } diff --git a/internal/log/log.go b/internal/log/log.go index 2c9d1a9273..710dc2ee95 100644 --- a/internal/log/log.go +++ b/internal/log/log.go @@ -22,6 +22,18 @@ import ( "strings" ) +// NewLogger creates a new logger based on the provided format and level. 
+func NewLogger(format, level string, out, err io.Writer) (Logger, error) { + switch strings.ToLower(format) { + case "json": + return NewStructuredLogger(out, err, level) + case "standard": + return NewStdLogger(out, err, level) + default: + return nil, fmt.Errorf("logging format invalid: %s", format) + } +} + // StdLogger is the standard logger type StdLogger struct { outLogger *slog.Logger diff --git a/internal/prebuiltconfigs/prebuiltconfigs_test.go b/internal/prebuiltconfigs/prebuiltconfigs_test.go index 8bc6ef1e41..ce2e119bf6 100644 --- a/internal/prebuiltconfigs/prebuiltconfigs_test.go +++ b/internal/prebuiltconfigs/prebuiltconfigs_test.go @@ -21,6 +21,7 @@ import ( ) var expectedToolSources = []string{ + "alloydb-omni", "alloydb-postgres-admin", "alloydb-postgres-observability", "alloydb-postgres", @@ -99,36 +100,40 @@ func TestLoadPrebuiltToolYAMLs(t *testing.T) { } func TestGetPrebuiltTool(t *testing.T) { - alloydb_admin_config, _ := Get("alloydb-postgres-admin") - alloydb_observability_config, _ := Get("alloydb-postgres-observability") - alloydb_config, _ := Get("alloydb-postgres") - bigquery_config, _ := Get("bigquery") - clickhouse_config, _ := Get("clickhouse") - cloudsqlpg_observability_config, _ := Get("cloud-sql-postgres-observability") - cloudsqlpg_config, _ := Get("cloud-sql-postgres") - cloudsqlpg_admin_config, _ := Get("cloud-sql-postgres-admin") - cloudsqlmysql_admin_config, _ := Get("cloud-sql-mysql-admin") - cloudsqlmssql_admin_config, _ := Get("cloud-sql-mssql-admin") - cloudsqlmysql_observability_config, _ := Get("cloud-sql-mysql-observability") - cloudsqlmysql_config, _ := Get("cloud-sql-mysql") - cloudsqlmssql_observability_config, _ := Get("cloud-sql-mssql-observability") - cloudsqlmssql_config, _ := Get("cloud-sql-mssql") - dataplex_config, _ := Get("dataplex") - firestoreconfig, _ := Get("firestore") - looker_config, _ := Get("looker") - lookerca_config, _ := Get("looker-conversational-analytics") - mysql_config, _ := Get("mysql") - 
mssql_config, _ := Get("mssql") - oceanbase_config, _ := Get("oceanbase") - postgresconfig, _ := Get("postgres") - singlestore_config, _ := Get("singlestore") - spanner_config, _ := Get("spanner") - spannerpg_config, _ := Get("spanner-postgres") - mindsdb_config, _ := Get("mindsdb") - sqlite_config, _ := Get("sqlite") - neo4jconfig, _ := Get("neo4j") - healthcare_config, _ := Get("cloud-healthcare") - snowflake_config, _ := Get("snowflake") + alloydb_omni_config := getOrFatal(t, "alloydb-omni") + alloydb_admin_config := getOrFatal(t, "alloydb-postgres-admin") + alloydb_observability_config := getOrFatal(t, "alloydb-postgres-observability") + alloydb_config := getOrFatal(t, "alloydb-postgres") + bigquery_config := getOrFatal(t, "bigquery") + clickhouse_config := getOrFatal(t, "clickhouse") + cloudsqlpg_observability_config := getOrFatal(t, "cloud-sql-postgres-observability") + cloudsqlpg_config := getOrFatal(t, "cloud-sql-postgres") + cloudsqlpg_admin_config := getOrFatal(t, "cloud-sql-postgres-admin") + cloudsqlmysql_admin_config := getOrFatal(t, "cloud-sql-mysql-admin") + cloudsqlmssql_admin_config := getOrFatal(t, "cloud-sql-mssql-admin") + cloudsqlmysql_observability_config := getOrFatal(t, "cloud-sql-mysql-observability") + cloudsqlmysql_config := getOrFatal(t, "cloud-sql-mysql") + cloudsqlmssql_observability_config := getOrFatal(t, "cloud-sql-mssql-observability") + cloudsqlmssql_config := getOrFatal(t, "cloud-sql-mssql") + dataplex_config := getOrFatal(t, "dataplex") + firestoreconfig := getOrFatal(t, "firestore") + looker_config := getOrFatal(t, "looker") + lookerca_config := getOrFatal(t, "looker-conversational-analytics") + mysql_config := getOrFatal(t, "mysql") + mssql_config := getOrFatal(t, "mssql") + oceanbase_config := getOrFatal(t, "oceanbase") + postgresconfig := getOrFatal(t, "postgres") + singlestore_config := getOrFatal(t, "singlestore") + spanner_config := getOrFatal(t, "spanner") + spannerpg_config := getOrFatal(t, "spanner-postgres") + 
mindsdb_config := getOrFatal(t, "mindsdb") + sqlite_config := getOrFatal(t, "sqlite") + neo4jconfig := getOrFatal(t, "neo4j") + healthcare_config := getOrFatal(t, "cloud-healthcare") + snowflake_config := getOrFatal(t, "snowflake") + if len(alloydb_omni_config) <= 0 { + t.Fatalf("unexpected error: could not fetch alloydb omni prebuilt tools yaml") + } if len(alloydb_admin_config) <= 0 { t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml") } @@ -233,3 +238,11 @@ func TestFailGetPrebuiltTool(t *testing.T) { t.Fatalf("unexpected an error but got nil.") } } + +func getOrFatal(t *testing.T, prebuiltSourceConfig string) []byte { + bytes, err := Get(prebuiltSourceConfig) + if err != nil { + t.Fatalf("Cannot get prebuilt config for %q, error %v", prebuiltSourceConfig, err) + } + return bytes +} diff --git a/internal/prebuiltconfigs/tools/alloydb-omni.yaml b/internal/prebuiltconfigs/tools/alloydb-omni.yaml new file mode 100644 index 0000000000..9fac9a32e7 --- /dev/null +++ b/internal/prebuiltconfigs/tools/alloydb-omni.yaml @@ -0,0 +1,277 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +sources: + alloydb-omni-source: + kind: postgres + host: ${ALLOYDB_OMNI_HOST:localhost} + port: ${ALLOYDB_OMNI_PORT:5432} + database: ${ALLOYDB_OMNI_DATABASE} + user: ${ALLOYDB_OMNI_USER} + password: ${ALLOYDB_OMNI_PASSWORD:} + queryParams: ${ALLOYDB_OMNI_QUERY_PARAMS:} + +tools: + execute_sql: + kind: postgres-execute-sql + source: alloydb-omni-source + description: Use this tool to execute sql. + + list_tables: + kind: postgres-list-tables + source: alloydb-omni-source + description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas." + + list_active_queries: + kind: postgres-list-active-queries + source: alloydb-omni-source + description: "List the top N (default 50) currently running queries (state='active') from pg_stat_activity, ordered by longest-running first. Returns pid, user, database, application_name, client_addr, state, wait_event_type/wait_event, backend/xact/query start times, computed query_duration, and the SQL text." + + list_available_extensions: + kind: postgres-list-available-extensions + source: alloydb-omni-source + description: "Discover all PostgreSQL extensions available for installation on this server, returning name, default_version, and description." + + list_installed_extensions: + kind: postgres-list-installed-extensions + source: alloydb-omni-source + description: "List all installed PostgreSQL extensions with their name, version, schema, owner, and description." 
+ + long_running_transactions: + kind: postgres-long-running-transactions + source: alloydb-omni-source + + list_locks: + kind: postgres-list-locks + source: alloydb-omni-source + + replication_stats: + kind: postgres-replication-stats + source: alloydb-omni-source + + list_autovacuum_configurations: + kind: postgres-sql + source: alloydb-omni-source + description: "List PostgreSQL autovacuum-related configurations (name and current setting) from pg_settings." + statement: | + SELECT name, + setting + FROM pg_settings + WHERE category = 'Autovacuum'; + + list_columnar_configurations: + kind: postgres-sql + source: alloydb-omni-source + description: "List AlloyDB Omni columnar-related configurations (name and current setting) from pg_settings." + statement: | + SELECT name, + setting + FROM pg_settings + WHERE name like 'google_columnar_engine.%'; + + list_columnar_recommended_columns: + kind: postgres-sql + source: alloydb-omni-source + description: "Lists columns that AlloyDB Omni recommends adding to the columnar engine to improve query performance." + statement: select * from g_columnar_recommended_columns; + + list_memory_configurations: + kind: postgres-sql + source: alloydb-omni-source + description: "List PostgreSQL memory-related configurations (name and current setting) from pg_settings." + statement: | + ( + SELECT + name, + pg_size_pretty((setting::bigint * 1024)::bigint) setting + FROM pg_settings + WHERE name IN ('work_mem', 'maintenance_work_mem') + ) + UNION ALL + ( + SELECT + name, + pg_size_pretty((((setting::bigint) * 8) * 1024)::bigint) + FROM pg_settings + WHERE name IN ('shared_buffers', 'wal_buffers', 'effective_cache_size', 'temp_buffers') + ) + ORDER BY 1 DESC; + + list_top_bloated_tables: + kind: postgres-sql + source: alloydb-omni-source + description: | + List the top tables by dead-tuple (approximate bloat signal), returning schema, table, live/dead tuples, percentage, and last vacuum/analyze times. 
+ statement: | + SELECT + schemaname AS schema_name, + relname AS relation_name, + n_live_tup AS live_tuples, + n_dead_tup AS dead_tuples, + TRUNC((n_dead_tup::NUMERIC / NULLIF(n_live_tup + n_dead_tup, 0)) * 100, 2) AS dead_tuple_percentage, + last_vacuum, + last_autovacuum, + last_analyze, + last_autoanalyze + FROM pg_stat_user_tables + ORDER BY n_dead_tup DESC + LIMIT COALESCE($1::int, 50); + parameters: + - name: limit + description: "The maximum number of results to return." + type: integer + default: 50 + + list_replication_slots: + kind: postgres-sql + source: alloydb-omni-source + description: "List key details for all PostgreSQL replication slots (e.g., type, database, active status) and calculates the size of the outstanding WAL that is being prevented from removal by the slot." + statement: | + SELECT + slot_name, + slot_type, + plugin, + database, + temporary, + active, + restart_lsn, + confirmed_flush_lsn, + xmin, + catalog_xmin, + pg_size_pretty(pg_wal_lsn_diff(pg_current_wal_lsn(), restart_lsn)) AS retained_wal + FROM pg_replication_slots; + + list_invalid_indexes: + kind: postgres-sql + source: alloydb-omni-source + description: "Lists all invalid PostgreSQL indexes which are taking up disk space but are unusable by the query planner. Typically created by failed CREATE INDEX CONCURRENTLY operations." + statement: | + SELECT + nspname AS schema_name, + indexrelid::regclass AS index_name, + indrelid::regclass AS table_name, + pg_size_pretty(pg_total_relation_size(indexrelid)) AS index_size, + indisready, + indisvalid, + pg_get_indexdef(pg_class.oid) AS index_def + FROM pg_index + JOIN pg_class ON pg_class.oid = pg_index.indexrelid + JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace + WHERE indisvalid = FALSE; + + get_query_plan: + kind: postgres-sql + source: alloydb-omni-source + description: "Generate a PostgreSQL EXPLAIN plan in JSON format for a single SQL statement—without executing it. 
This returns the optimizer's estimated plan, costs, and rows (no ANALYZE, no extra options). Use in production safely for plan inspection, regression checks, and query tuning workflows." + statement: | + EXPLAIN (FORMAT JSON) {{.query}}; + templateParameters: + - name: query + type: string + description: "The SQL statement for which you want to generate plan (omit the EXPLAIN keyword)." + required: true + + list_views: + kind: postgres-list-views + source: alloydb-omni-source + + list_schemas: + kind: postgres-list-schemas + source: alloydb-omni-source + + list_indexes: + kind: postgres-list-indexes + source: alloydb-omni-source + + list_sequences: + kind: postgres-list-sequences + source: alloydb-omni-source + + database_overview: + kind: postgres-database-overview + source: alloydb-omni-source + + list_triggers: + kind: postgres-list-triggers + source: alloydb-omni-source + + list_query_stats: + kind: postgres-list-query-stats + source: alloydb-omni-source + + get_column_cardinality: + kind: postgres-get-column-cardinality + source: alloydb-omni-source + + list_table_stats: + kind: postgres-list-table-stats + source: alloydb-omni-source + + list_publication_tables: + kind: postgres-list-publication-tables + source: alloydb-omni-source + + list_tablespaces: + kind: postgres-list-tablespaces + source: alloydb-omni-source + + list_pg_settings: + kind: postgres-list-pg-settings + source: alloydb-omni-source + + list_database_stats: + kind: postgres-list-database-stats + source: alloydb-omni-source + + list_roles: + kind: postgres-list-roles + source: alloydb-omni-source + + list_stored_procedure: + kind: postgres-list-stored-procedure + source: alloydb-omni-source + +toolsets: + alloydb_omni_database_tools: + - execute_sql + - list_tables + - list_active_queries + - list_available_extensions + - list_installed_extensions + - list_autovacuum_configurations + - list_columnar_configurations + - list_columnar_recommended_columns + - list_memory_configurations + - 
list_top_bloated_tables + - list_replication_slots + - list_invalid_indexes + - get_query_plan + - list_views + - list_schemas + - database_overview + - list_triggers + - list_indexes + - list_sequences + - long_running_transactions + - list_locks + - replication_stats + - list_query_stats + - get_column_cardinality + - list_publication_tables + - list_tablespaces + - list_pg_settings + - list_database_stats + - list_roles + - list_table_stats + - list_stored_procedure diff --git a/internal/prebuiltconfigs/tools/looker.yaml b/internal/prebuiltconfigs/tools/looker.yaml index 442cd11106..c6bbd51c56 100644 --- a/internal/prebuiltconfigs/tools/looker.yaml +++ b/internal/prebuiltconfigs/tools/looker.yaml @@ -959,6 +959,21 @@ tools: Output: A confirmation message upon successful file deletion. + validate_project: + kind: looker-validate-project + source: looker-source + description: | + This tool checks a LookML project for syntax errors. + + Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first. + + Parameters: + - project_id (required): The unique ID of the LookML project. + + Output: + A list of error details including the file path and line number, and also a list of models + that are not currently valid due to LookML errors. + get_connections: kind: looker-get-connections source: looker-source @@ -1072,6 +1087,7 @@ toolsets: - create_project_file - update_project_file - delete_project_file + - validate_project - get_connections - get_connection_schemas - get_connection_databases diff --git a/internal/prompts/custom/custom.go b/internal/prompts/custom/custom.go index 312b53613c..4ed44c4651 100644 --- a/internal/prompts/custom/custom.go +++ b/internal/prompts/custom/custom.go @@ -25,12 +25,12 @@ import ( type Message = prompts.Message -const kind = "custom" +const resourceType = "custom" -// init registers this prompt kind with the prompt framework. +// init registers this prompt type with the prompt framework. 
func init() { - if !prompts.Register(kind, newConfig) { - panic(fmt.Sprintf("prompt kind %q already registered", kind)) + if !prompts.Register(resourceType, newConfig) { + panic(fmt.Sprintf("prompt type %q already registered", resourceType)) } } @@ -56,8 +56,8 @@ type Config struct { var _ prompts.PromptConfig = Config{} var _ prompts.Prompt = Prompt{} -func (c Config) PromptConfigKind() string { - return kind +func (c Config) PromptConfigType() string { + return resourceType } func (c Config) Initialize() (prompts.Prompt, error) { diff --git a/internal/prompts/custom/custom_test.go b/internal/prompts/custom/custom_test.go index ff411c7b8b..4f9ad565e5 100644 --- a/internal/prompts/custom/custom_test.go +++ b/internal/prompts/custom/custom_test.go @@ -42,7 +42,7 @@ func TestConfig(t *testing.T) { Arguments: testArgs, } - // initialize and check kind + // initialize and check type p, err := cfg.Initialize() if err != nil { t.Fatalf("Initialize() failed: %v", err) @@ -50,8 +50,8 @@ func TestConfig(t *testing.T) { if p == nil { t.Fatal("Initialize() returned a nil prompt") } - if cfg.PromptConfigKind() != "custom" { - t.Errorf("PromptConfigKind() = %q, want %q", cfg.PromptConfigKind(), "custom") + if cfg.PromptConfigType() != "custom" { + t.Errorf("PromptConfigType() = %q, want %q", cfg.PromptConfigType(), "custom") } t.Run("Manifest", func(t *testing.T) { diff --git a/internal/prompts/prompts.go b/internal/prompts/prompts.go index bac784c4c5..ae8312c49e 100644 --- a/internal/prompts/prompts.go +++ b/internal/prompts/prompts.go @@ -30,40 +30,40 @@ var promptRegistry = make(map[string]PromptConfigFactory) // Register allows individual prompt packages to register their configuration // factory function. This is typically called from an init() function in the -// prompt's package. It associates a 'kind' string with a function that can +// prompt's package. It associates a 'type' string with a function that can // produce the specific PromptConfig type. 
It returns true if the registration was -// successful, and false if a prompt with the same kind was already registered. -func Register(kind string, factory PromptConfigFactory) bool { - if _, exists := promptRegistry[kind]; exists { - // Prompt with this kind already exists, do not overwrite. +// successful, and false if a prompt with the same type was already registered. +func Register(resourceType string, factory PromptConfigFactory) bool { + if _, exists := promptRegistry[resourceType]; exists { + // Prompt with this type already exists, do not overwrite. return false } - promptRegistry[kind] = factory + promptRegistry[resourceType] = factory return true } -// DecodeConfig looks up the registered factory for the given kind and uses it +// DecodeConfig looks up the registered factory for the given type and uses it // to decode the prompt configuration. -func DecodeConfig(ctx context.Context, kind, name string, decoder *yaml.Decoder) (PromptConfig, error) { - factory, found := promptRegistry[kind] - if !found && kind == "" { - kind = "custom" - factory, found = promptRegistry[kind] +func DecodeConfig(ctx context.Context, resourceType, name string, decoder *yaml.Decoder) (PromptConfig, error) { + factory, found := promptRegistry[resourceType] + if !found && resourceType == "" { + resourceType = "custom" + factory, found = promptRegistry[resourceType] } if !found { - return nil, fmt.Errorf("unknown prompt kind: %q", kind) + return nil, fmt.Errorf("unknown prompt type: %q", resourceType) } promptConfig, err := factory(ctx, name, decoder) if err != nil { - return nil, fmt.Errorf("unable to parse prompt %q as kind %q: %w", name, kind, err) + return nil, fmt.Errorf("unable to parse prompt %q as resourceType %q: %w", name, resourceType, err) } return promptConfig, nil } type PromptConfig interface { - PromptConfigKind() string + PromptConfigType() string Initialize() (Prompt, error) } diff --git a/internal/prompts/prompts_test.go b/internal/prompts/prompts_test.go index 
d0fe0a95f5..72fbe5c3ce 100644 --- a/internal/prompts/prompts_test.go +++ b/internal/prompts/prompts_test.go @@ -29,16 +29,16 @@ import ( type mockPromptConfig struct { name string - kind string + Type string } -func (m *mockPromptConfig) PromptConfigKind() string { return m.kind } +func (m *mockPromptConfig) PromptConfigType() string { return m.Type } func (m *mockPromptConfig) Initialize() (prompts.Prompt, error) { return nil, nil } var errMockFactory = errors.New("mock factory error") func mockFactory(ctx context.Context, name string, decoder *yaml.Decoder) (prompts.PromptConfig, error) { - return &mockPromptConfig{name: name, kind: "mockKind"}, nil + return &mockPromptConfig{name: name, Type: "mockType"}, nil } func mockErrorFactory(ctx context.Context, name string, decoder *yaml.Decoder) (prompts.PromptConfig, error) { @@ -50,17 +50,17 @@ func TestRegistry(t *testing.T) { ctx := context.Background() t.Run("RegisterAndDecodeSuccess", func(t *testing.T) { - kind := "testKindSuccess" - if !prompts.Register(kind, mockFactory) { + resourceType := "testTypeSuccess" + if !prompts.Register(resourceType, mockFactory) { t.Fatal("expected registration to succeed") } // This should fail because we are registering a duplicate - if prompts.Register(kind, mockFactory) { + if prompts.Register(resourceType, mockFactory) { t.Fatal("expected duplicate registration to fail") } decoder := yaml.NewDecoder(strings.NewReader("")) - config, err := prompts.DecodeConfig(ctx, kind, "testPrompt", decoder) + config, err := prompts.DecodeConfig(ctx, resourceType, "testPrompt", decoder) if err != nil { t.Fatalf("expected DecodeConfig to succeed, but got error: %v", err) } @@ -69,25 +69,25 @@ func TestRegistry(t *testing.T) { } }) - t.Run("DecodeUnknownKind", func(t *testing.T) { + t.Run("DecodeUnknownType", func(t *testing.T) { decoder := yaml.NewDecoder(strings.NewReader("")) - _, err := prompts.DecodeConfig(ctx, "unregisteredKind", "testPrompt", decoder) + _, err := 
prompts.DecodeConfig(ctx, "unregisteredType", "testPrompt", decoder) if err == nil { - t.Fatal("expected an error for unknown kind, but got nil") + t.Fatal("expected an error for unknown type, but got nil") } - if !strings.Contains(err.Error(), "unknown prompt kind") { - t.Errorf("expected error to contain 'unknown prompt kind', but got: %v", err) + if !strings.Contains(err.Error(), "unknown prompt type") { + t.Errorf("expected error to contain 'unknown prompt type', but got: %v", err) } }) t.Run("FactoryReturnsError", func(t *testing.T) { - kind := "testKindError" - if !prompts.Register(kind, mockErrorFactory) { + resourceType := "testTypeError" + if !prompts.Register(resourceType, mockErrorFactory) { t.Fatal("expected registration to succeed") } decoder := yaml.NewDecoder(strings.NewReader("")) - _, err := prompts.DecodeConfig(ctx, kind, "testPrompt", decoder) + _, err := prompts.DecodeConfig(ctx, resourceType, "testPrompt", decoder) if err == nil { t.Fatal("expected an error from the factory, but got nil") } @@ -100,13 +100,13 @@ func TestRegistry(t *testing.T) { decoder := yaml.NewDecoder(strings.NewReader("description: A test prompt")) config, err := prompts.DecodeConfig(ctx, "", "testDefaultPrompt", decoder) if err != nil { - t.Fatalf("expected DecodeConfig with empty kind to succeed, but got error: %v", err) + t.Fatalf("expected DecodeConfig with empty type to succeed, but got error: %v", err) } if config == nil { - t.Fatal("expected a non-nil config for default kind") + t.Fatal("expected a non-nil config for default type") } - if config.PromptConfigKind() != "custom" { - t.Errorf("expected default kind to be 'custom', but got %q", config.PromptConfigKind()) + if config.PromptConfigType() != "custom" { + t.Errorf("expected default type to be 'custom', but got %q", config.PromptConfigType()) } }) } diff --git a/internal/server/api.go b/internal/server/api.go index 0396d1a58a..c992051269 100644 --- a/internal/server/api.go +++ b/internal/server/api.go @@ 
-19,13 +19,13 @@ import ( "errors" "fmt" "net/http" - "strings" "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" "github.com/go-chi/render" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/metric" @@ -215,7 +215,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) { // Check if any of the specified auth services is verified isAuthorized := tool.Authorized(verifiedAuthServices) if !isAuthorized { - err = fmt.Errorf("tool invocation not authorized. Please make sure your specify correct auth headers") + err = fmt.Errorf("tool invocation not authorized. Please make sure you specify correct auth headers") s.logger.DebugContext(ctx, err.Error()) _ = render.Render(w, r, newErrResponse(err, http.StatusUnauthorized)) return @@ -231,17 +231,30 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) { return } - params, err := tool.ParseParams(data, claimsFromAuth) + params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) if err != nil { - // If auth error, return 401 - if errors.Is(err, util.ErrUnauthorized) { - s.logger.DebugContext(ctx, fmt.Sprintf("error parsing authenticated parameters from ID token: %s", err)) + var clientServerErr *util.ClientServerError + + // Return 401 Authentication errors + if errors.As(err, &clientServerErr) && clientServerErr.Code == http.StatusUnauthorized { + s.logger.DebugContext(ctx, fmt.Sprintf("auth error: %v", err)) _ = render.Render(w, r, newErrResponse(err, http.StatusUnauthorized)) return } - err = fmt.Errorf("provided parameters were invalid: %w", err) - s.logger.DebugContext(ctx, err.Error()) - _ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest)) + + var agentErr *util.AgentError + if errors.As(err, &agentErr) { + 
s.logger.DebugContext(ctx, fmt.Sprintf("agent validation error: %v", err)) + errMap := map[string]string{"error": err.Error()} + errMarshal, _ := json.Marshal(errMap) + + _ = render.Render(w, r, &resultResponse{Result: string(errMarshal)}) + return + } + + // Return 500 if it's a specific ClientServerError that isn't a 401, or any other unexpected error + s.logger.ErrorContext(ctx, fmt.Sprintf("internal server error: %v", err)) + _ = render.Render(w, r, newErrResponse(err, http.StatusInternalServerError)) return } s.logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params)) @@ -258,34 +271,50 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) { // Determine what error to return to the users. if err != nil { - errStr := err.Error() - var statusCode int + var tbErr util.ToolboxError - // Upstream API auth error propagation - switch { - case strings.Contains(errStr, "Error 401"): - statusCode = http.StatusUnauthorized - case strings.Contains(errStr, "Error 403"): - statusCode = http.StatusForbidden - } + if errors.As(err, &tbErr) { + switch tbErr.Category() { + case util.CategoryAgent: + // Agent Errors -> 200 OK + s.logger.DebugContext(ctx, fmt.Sprintf("Tool invocation agent error: %v", err)) + res = map[string]string{ + "error": err.Error(), + } - if statusCode == http.StatusUnauthorized || statusCode == http.StatusForbidden { - if clientAuth { - // Propagate the original 401/403 error. - s.logger.DebugContext(ctx, fmt.Sprintf("error invoking tool. 
Client credentials lack authorization to the source: %v", err)) + case util.CategoryServer: + // Server Errors -> Check the specific code inside + var clientServerErr *util.ClientServerError + statusCode := http.StatusInternalServerError // Default to 500 + + if errors.As(err, &clientServerErr) { + if clientServerErr.Code != 0 { + statusCode = clientServerErr.Code + } + } + + // Process auth error + if statusCode == http.StatusUnauthorized || statusCode == http.StatusForbidden { + if clientAuth { + // Token error, pass through 401/403 + s.logger.DebugContext(ctx, fmt.Sprintf("Client credentials lack authorization: %v", err)) + _ = render.Render(w, r, newErrResponse(err, statusCode)) + return + } + // ADC/Config error, return 500 + statusCode = http.StatusInternalServerError + } + + s.logger.ErrorContext(ctx, fmt.Sprintf("Tool invocation server error: %v", err)) _ = render.Render(w, r, newErrResponse(err, statusCode)) return } - // ADC lacking permission or credentials configuration error. 
- internalErr := fmt.Errorf("unexpected auth error occured during Tool invocation: %w", err) - s.logger.ErrorContext(ctx, internalErr.Error()) - _ = render.Render(w, r, newErrResponse(internalErr, http.StatusInternalServerError)) + } else { + // Unknown error -> 500 + s.logger.ErrorContext(ctx, fmt.Sprintf("Tool invocation unknown error: %v", err)) + _ = render.Render(w, r, newErrResponse(err, http.StatusInternalServerError)) return } - err = fmt.Errorf("error while invoking tool: %w", err) - s.logger.DebugContext(ctx, err.Error()) - _ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest)) - return } resMarshal, err := json.Marshal(res) diff --git a/internal/server/common_test.go b/internal/server/common_test.go index 39aca55be3..8944cfba20 100644 --- a/internal/server/common_test.go +++ b/internal/server/common_test.go @@ -24,7 +24,6 @@ import ( "testing" "github.com/go-chi/chi/v5" - "github.com/googleapis/genai-toolbox/internal/embeddingmodels" "github.com/googleapis/genai-toolbox/internal/log" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server/resources" @@ -41,136 +40,6 @@ var ( _ prompts.Prompt = MockPrompt{} ) -// MockTool is used to mock tools in tests -type MockTool struct { - Name string - Description string - Params []parameters.Parameter - manifest tools.Manifest - unauthorized bool - requiresClientAuthrorization bool -} - -func (t MockTool) Invoke(context.Context, tools.SourceProvider, parameters.ParamValues, tools.AccessToken) (any, error) { - mock := []any{t.Name} - return mock, nil -} - -func (t MockTool) ToConfig() tools.ToolConfig { - return nil -} - -// claims is a map of user info decoded from an auth token -func (t MockTool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) { - return parameters.ParseParams(t.Params, data, claimsMap) -} - -func (t MockTool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, 
embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { - return parameters.EmbedParams(ctx, t.Params, paramValues, embeddingModelsMap, nil) -} - -func (t MockTool) Manifest() tools.Manifest { - pMs := make([]parameters.ParameterManifest, 0, len(t.Params)) - for _, p := range t.Params { - pMs = append(pMs, p.Manifest()) - } - return tools.Manifest{Description: t.Description, Parameters: pMs} -} - -func (t MockTool) Authorized(verifiedAuthServices []string) bool { - // defaulted to true - return !t.unauthorized -} - -func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error) { - // defaulted to false - return t.requiresClientAuthrorization, nil -} - -func (t MockTool) McpManifest() tools.McpManifest { - properties := make(map[string]parameters.ParameterMcpManifest) - required := make([]string, 0) - authParams := make(map[string][]string) - - for _, p := range t.Params { - name := p.GetName() - paramManifest, authParamList := p.McpManifest() - properties[name] = paramManifest - required = append(required, name) - - if len(authParamList) > 0 { - authParams[name] = authParamList - } - } - - toolsSchema := parameters.McpToolsSchema{ - Type: "object", - Properties: properties, - Required: required, - } - - mcpManifest := tools.McpManifest{ - Name: t.Name, - Description: t.Description, - InputSchema: toolsSchema, - } - - if len(authParams) > 0 { - mcpManifest.Metadata = map[string]any{ - "toolbox/authParams": authParams, - } - } - - return mcpManifest -} - -func (t MockTool) GetAuthTokenHeaderName(tools.SourceProvider) (string, error) { - return "Authorization", nil -} - -// MockPrompt is used to mock prompts in tests -type MockPrompt struct { - Name string - Description string - Args prompts.Arguments -} - -func (p MockPrompt) SubstituteParams(vals parameters.ParamValues) (any, error) { - return []prompts.Message{ - { - Role: "user", - Content: fmt.Sprintf("substituted %s", p.Name), - }, - }, nil -} - -func 
(p MockPrompt) ParseArgs(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) { - var params parameters.Parameters - for _, arg := range p.Args { - params = append(params, arg.Parameter) - } - return parameters.ParseParams(params, data, claimsMap) -} - -func (p MockPrompt) Manifest() prompts.Manifest { - var argManifests []parameters.ParameterManifest - for _, arg := range p.Args { - argManifests = append(argManifests, arg.Manifest()) - } - return prompts.Manifest{ - Description: p.Description, - Arguments: argManifests, - } -} - -func (p MockPrompt) McpManifest() prompts.McpManifest { - return prompts.GetMcpManifest(p.Name, p.Description, p.Args) -} - -func (p MockPrompt) ToConfig() prompts.PromptConfig { - return nil -} - var tool1 = MockTool{ Name: "no_params", Params: []parameters.Parameter{}, diff --git a/internal/server/config.go b/internal/server/config.go index 652e7547de..48f623b0ea 100644 --- a/internal/server/config.go +++ b/internal/server/config.go @@ -14,8 +14,10 @@ package server import ( + "bytes" "context" "fmt" + "io" "regexp" "strings" @@ -127,315 +129,264 @@ func (s *StringLevel) Type() string { return "stringLevel" } -// SourceConfigs is a type used to allow unmarshal of the data source config map type SourceConfigs map[string]sources.SourceConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &SourceConfigs{} - -func (c *SourceConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(SourceConfigs) - // Parse the 'kind' fields for each source - var raw map[string]util.DelayedUnmarshaler - if err := unmarshal(&raw); err != nil { - return err - } - - for name, u := range raw { - // Unmarshal to a general type that ensure it capture all fields - var v map[string]any - if err := u.Unmarshal(&v); err != nil { - return fmt.Errorf("unable to unmarshal %q: %w", name, err) - } - - kind, ok := v["kind"] - if !ok { - return fmt.Errorf("missing 'kind' field 
for source %q", name) - } - kindStr, ok := kind.(string) - if !ok { - return fmt.Errorf("invalid 'kind' field for source %q (must be a string)", name) - } - - yamlDecoder, err := util.NewStrictDecoder(v) - if err != nil { - return fmt.Errorf("error creating YAML decoder for source %q: %w", name, err) - } - - sourceConfig, err := sources.DecodeConfig(ctx, kindStr, name, yamlDecoder) - if err != nil { - return err - } - (*c)[name] = sourceConfig - } - return nil -} - -// AuthServiceConfigs is a type used to allow unmarshal of the data authService config map type AuthServiceConfigs map[string]auth.AuthServiceConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &AuthServiceConfigs{} - -func (c *AuthServiceConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(AuthServiceConfigs) - // Parse the 'kind' fields for each authService - var raw map[string]util.DelayedUnmarshaler - if err := unmarshal(&raw); err != nil { - return err - } - - for name, u := range raw { - var v map[string]any - if err := u.Unmarshal(&v); err != nil { - return fmt.Errorf("unable to unmarshal %q: %w", name, err) - } - - kind, ok := v["kind"] - if !ok { - return fmt.Errorf("missing 'kind' field for %q", name) - } - - dec, err := util.NewStrictDecoder(v) - if err != nil { - return fmt.Errorf("error creating decoder: %w", err) - } - switch kind { - case google.AuthServiceKind: - actual := google.Config{Name: name} - if err := dec.DecodeContext(ctx, &actual); err != nil { - return fmt.Errorf("unable to parse as %q: %w", kind, err) - } - (*c)[name] = actual - default: - return fmt.Errorf("%q is not a valid kind of auth source", kind) - } - } - return nil -} - -// EmbeddingModelConfigs is a type used to allow unmarshal of the embedding model config map type EmbeddingModelConfigs map[string]embeddingmodels.EmbeddingModelConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &EmbeddingModelConfigs{} - -func (c 
*EmbeddingModelConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(EmbeddingModelConfigs) - // Parse the 'kind' fields for each embedding model - var raw map[string]util.DelayedUnmarshaler - if err := unmarshal(&raw); err != nil { - return err - } - - for name, u := range raw { - // Unmarshal to a general type that ensure it capture all fields - var v map[string]any - if err := u.Unmarshal(&v); err != nil { - return fmt.Errorf("unable to unmarshal embedding model %q: %w", name, err) - } - - kind, ok := v["kind"] - if !ok { - return fmt.Errorf("missing 'kind' field for embedding model %q", name) - } - - dec, err := util.NewStrictDecoder(v) - if err != nil { - return fmt.Errorf("error creating decoder: %w", err) - } - switch kind { - case gemini.EmbeddingModelKind: - actual := gemini.Config{Name: name} - if err := dec.DecodeContext(ctx, &actual); err != nil { - return fmt.Errorf("unable to parse as %q: %w", kind, err) - } - (*c)[name] = actual - default: - return fmt.Errorf("%q is not a valid kind of auth source", kind) - } - } - return nil -} - -// ToolConfigs is a type used to allow unmarshal of the tool configs type ToolConfigs map[string]tools.ToolConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &ToolConfigs{} - -func (c *ToolConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(ToolConfigs) - // Parse the 'kind' fields for each source - var raw map[string]util.DelayedUnmarshaler - if err := unmarshal(&raw); err != nil { - return err - } - - for name, u := range raw { - err := NameValidation(name) - if err != nil { - return err - } - var v map[string]any - if err := u.Unmarshal(&v); err != nil { - return fmt.Errorf("unable to unmarshal %q: %w", name, err) - } - - // `authRequired` and `useClientOAuth` cannot be specified together - if v["authRequired"] != nil && v["useClientOAuth"] == true { - return fmt.Errorf("`authRequired` and `useClientOAuth` 
are mutually exclusive. Choose only one authentication method") - } - - // Make `authRequired` an empty list instead of nil for Tool manifest - if v["authRequired"] == nil { - v["authRequired"] = []string{} - } - - kindVal, ok := v["kind"] - if !ok { - return fmt.Errorf("missing 'kind' field for tool %q", name) - } - kindStr, ok := kindVal.(string) - if !ok { - return fmt.Errorf("invalid 'kind' field for tool %q (must be a string)", name) - } - - // validify parameter references - if rawParams, ok := v["parameters"]; ok { - if paramsList, ok := rawParams.([]any); ok { - // Turn params into a map - validParamNames := make(map[string]bool) - for _, rawP := range paramsList { - if pMap, ok := rawP.(map[string]any); ok { - if pName, ok := pMap["name"].(string); ok && pName != "" { - validParamNames[pName] = true - } - } - } - - // Validate references - for i, rawP := range paramsList { - pMap, ok := rawP.(map[string]any) - if !ok { - continue - } - - pName, _ := pMap["name"].(string) - refName, _ := pMap["valueFromParam"].(string) - - if refName != "" { - // Check if the referenced parameter exists - if !validParamNames[refName] { - return fmt.Errorf("tool %q config error: parameter %q (index %d) references '%q' in the 'valueFromParam' field, which is not a defined parameter", name, pName, i, refName) - } - - // Check for self-reference - if refName == pName { - return fmt.Errorf("tool %q config error: parameter %q cannot copy value from itself", name, pName) - } - } - } - } - } - yamlDecoder, err := util.NewStrictDecoder(v) - if err != nil { - return fmt.Errorf("error creating YAML decoder for tool %q: %w", name, err) - } - - toolCfg, err := tools.DecodeConfig(ctx, kindStr, name, yamlDecoder) - if err != nil { - return err - } - (*c)[name] = toolCfg - } - return nil -} - -// ToolsetConfigs is a type used to allow unmarshal of the toolset configs type ToolsetConfigs map[string]tools.ToolsetConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = 
&ToolsetConfigs{} - -func (c *ToolsetConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(ToolsetConfigs) - - var raw map[string][]string - if err := unmarshal(&raw); err != nil { - return err - } - - for name, toolList := range raw { - (*c)[name] = tools.ToolsetConfig{Name: name, ToolNames: toolList} - } - return nil -} - -// PromptConfigs is a type used to allow unmarshal of the prompt configs type PromptConfigs map[string]prompts.PromptConfig - -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &PromptConfigs{} - -func (c *PromptConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(PromptConfigs) - var raw map[string]util.DelayedUnmarshaler - if err := unmarshal(&raw); err != nil { - return err - } - - for name, u := range raw { - var v map[string]any - if err := u.Unmarshal(&v); err != nil { - return fmt.Errorf("unable to unmarshal prompt %q: %w", name, err) - } - - // Look for the 'kind' field. If it's not present, kindStr will be an - // empty string, which prompts.DecodeConfig will correctly default to "custom". - var kindStr string - if kindVal, ok := v["kind"]; ok { - var isString bool - kindStr, isString = kindVal.(string) - if !isString { - return fmt.Errorf("invalid 'kind' field for prompt %q (must be a string)", name) - } - } - - // Create a new, strict decoder for this specific prompt's data. - yamlDecoder, err := util.NewStrictDecoder(v) - if err != nil { - return fmt.Errorf("error creating YAML decoder for prompt %q: %w", name, err) - } - - // Use the central registry to decode the prompt based on its kind. 
- promptCfg, err := prompts.DecodeConfig(ctx, kindStr, name, yamlDecoder) - if err != nil { - return err - } - (*c)[name] = promptCfg - } - return nil -} - -// PromptsetConfigs is a type used to allow unmarshal of the PromptsetConfigs configs type PromptsetConfigs map[string]prompts.PromptsetConfig -// validate interface -var _ yaml.InterfaceUnmarshalerContext = &PromptsetConfigs{} +func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, AuthServiceConfigs, EmbeddingModelConfigs, ToolConfigs, ToolsetConfigs, PromptConfigs, error) { + // prepare configs map + var sourceConfigs SourceConfigs + var authServiceConfigs AuthServiceConfigs + var embeddingModelConfigs EmbeddingModelConfigs + var toolConfigs ToolConfigs + var toolsetConfigs ToolsetConfigs + var promptConfigs PromptConfigs + // promptset configs is not yet supported -func (c *PromptsetConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(interface{}) error) error { - *c = make(PromptsetConfigs) + decoder := yaml.NewDecoder(bytes.NewReader(raw)) + // for loop to unmarshal documents with the `---` separator + for { + var resource map[string]any + if err := decoder.DecodeContext(ctx, &resource); err != nil { + if err == io.EOF { + break + } + return nil, nil, nil, nil, nil, nil, fmt.Errorf("unable to decode YAML document: %w", err) + } + var kind, name string + var ok bool + if kind, ok = resource["kind"].(string); !ok { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'kind' field or it is not a string: %v", resource) + } + if name, ok = resource["name"].(string); !ok { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'name' field or it is not a string") + } + // remove 'kind' from map for strict unmarshaling + delete(resource, "kind") + switch kind { + case "sources": + c, err := UnmarshalYAMLSourceConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if sourceConfigs == nil { 
+ sourceConfigs = make(SourceConfigs) + } + sourceConfigs[name] = c + case "authServices": + c, err := UnmarshalYAMLAuthServiceConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if authServiceConfigs == nil { + authServiceConfigs = make(AuthServiceConfigs) + } + authServiceConfigs[name] = c + case "tools": + c, err := UnmarshalYAMLToolConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if toolConfigs == nil { + toolConfigs = make(ToolConfigs) + } + toolConfigs[name] = c + case "toolsets": + c, err := UnmarshalYAMLToolsetConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if toolsetConfigs == nil { + toolsetConfigs = make(ToolsetConfigs) + } + toolsetConfigs[name] = c + case "embeddingModels": + c, err := UnmarshalYAMLEmbeddingModelConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if embeddingModelConfigs == nil { + embeddingModelConfigs = make(EmbeddingModelConfigs) + } + embeddingModelConfigs[name] = c + case "prompts": + c, err := UnmarshalYAMLPromptConfig(ctx, name, resource) + if err != nil { + return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err) + } + if promptConfigs == nil { + promptConfigs = make(PromptConfigs) + } + promptConfigs[name] = c + default: + return nil, nil, nil, nil, nil, nil, fmt.Errorf("invalid kind %s", kind) + } + } + return sourceConfigs, authServiceConfigs, embeddingModelConfigs, toolConfigs, toolsetConfigs, promptConfigs, nil +} + +func UnmarshalYAMLSourceConfig(ctx context.Context, name string, r map[string]any) (sources.SourceConfig, error) { + resourceType, ok := r["type"].(string) + if !ok { + return nil, fmt.Errorf("missing 'type' field or it 
is not a string") + } + dec, err := util.NewStrictDecoder(r) + if err != nil { + return nil, fmt.Errorf("error creating decoder: %w", err) + } + sourceConfig, err := sources.DecodeConfig(ctx, resourceType, name, dec) + if err != nil { + return nil, err + } + return sourceConfig, nil +} + +func UnmarshalYAMLAuthServiceConfig(ctx context.Context, name string, r map[string]any) (auth.AuthServiceConfig, error) { + resourceType, ok := r["type"].(string) + if !ok { + return nil, fmt.Errorf("missing 'type' field or it is not a string") + } + if resourceType != google.AuthServiceType { + return nil, fmt.Errorf("%s is not a valid type of auth service", resourceType) + } + dec, err := util.NewStrictDecoder(r) + if err != nil { + return nil, fmt.Errorf("error creating decoder: %s", err) + } + actual := google.Config{Name: name} + if err := dec.DecodeContext(ctx, &actual); err != nil { + return nil, fmt.Errorf("unable to parse as %s: %w", name, err) + } + return actual, nil +} + +func UnmarshalYAMLEmbeddingModelConfig(ctx context.Context, name string, r map[string]any) (embeddingmodels.EmbeddingModelConfig, error) { + resourceType, ok := r["type"].(string) + if !ok { + return nil, fmt.Errorf("missing 'type' field or it is not a string") + } + if resourceType != gemini.EmbeddingModelType { + return nil, fmt.Errorf("%s is not a valid type of embedding model", resourceType) + } + dec, err := util.NewStrictDecoder(r) + if err != nil { + return nil, fmt.Errorf("error creating decoder: %s", err) + } + actual := gemini.Config{Name: name} + if err := dec.DecodeContext(ctx, &actual); err != nil { + return nil, fmt.Errorf("unable to parse as %q: %w", name, err) + } + return actual, nil +} + +func UnmarshalYAMLToolConfig(ctx context.Context, name string, r map[string]any) (tools.ToolConfig, error) { + resourceType, ok := r["type"].(string) + if !ok { + return nil, fmt.Errorf("missing 'type' field or it is not a string") + } + // `authRequired` and `useClientOAuth` cannot be specified 
together + if r["authRequired"] != nil && r["useClientOAuth"] == true { + return nil, fmt.Errorf("`authRequired` and `useClientOAuth` are mutually exclusive. Choose only one authentication method") + } + // Make `authRequired` an empty list instead of nil for Tool manifest + if r["authRequired"] == nil { + r["authRequired"] = []string{} + } + + // validate parameter references + if rawParams, ok := r["parameters"]; ok { + if paramsList, ok := rawParams.([]any); ok { + // Turn params into a map + validParamNames := make(map[string]bool) + for _, rawP := range paramsList { + if pMap, ok := rawP.(map[string]any); ok { + if pName, ok := pMap["name"].(string); ok && pName != "" { + validParamNames[pName] = true + } + } + } + + // Validate references + for i, rawP := range paramsList { + pMap, ok := rawP.(map[string]any) + if !ok { + continue + } + + pName, _ := pMap["name"].(string) + refName, _ := pMap["valueFromParam"].(string) + + if refName != "" { + // Check if the referenced parameter exists + if !validParamNames[refName] { + return nil, fmt.Errorf("tool %q config error: parameter %q (index %d) references '%q' in the 'valueFromParam' field, which is not a defined parameter", name, pName, i, refName) + } + + // Check for self-reference + if refName == pName { + return nil, fmt.Errorf("tool %q config error: parameter %q cannot copy value from itself", name, pName) + } + } + } + } + } + + dec, err := util.NewStrictDecoder(r) + if err != nil { + return nil, fmt.Errorf("error creating decoder: %s", err) + } + toolCfg, err := tools.DecodeConfig(ctx, resourceType, name, dec) + if err != nil { + return nil, err + } + return toolCfg, nil +} + +func UnmarshalYAMLToolsetConfig(ctx context.Context, name string, r map[string]any) (tools.ToolsetConfig, error) { + var toolsetConfig tools.ToolsetConfig + toolList, ok := r["tools"].([]any) + if !ok { + return toolsetConfig, fmt.Errorf("tools is missing or not a list of strings: %v", r) + } + justTools := map[string]any{"tools": 
toolList} + dec, err := util.NewStrictDecoder(justTools) + if err != nil { + return toolsetConfig, fmt.Errorf("error creating decoder: %s", err) + } var raw map[string][]string - if err := unmarshal(&raw); err != nil { - return err + if err := dec.DecodeContext(ctx, &raw); err != nil { + return toolsetConfig, fmt.Errorf("unable to unmarshal tools: %s", err) + } + return tools.ToolsetConfig{Name: name, ToolNames: raw["tools"]}, nil +} + +func UnmarshalYAMLPromptConfig(ctx context.Context, name string, r map[string]any) (prompts.PromptConfig, error) { + // Look for the 'type' field. If it's not present, typeStr will be an + // empty string, which prompts.DecodeConfig will correctly default to "custom". + var resourceType string + if typeVal, ok := r["type"]; ok { + var isString bool + resourceType, isString = typeVal.(string) + if !isString { + return nil, fmt.Errorf("invalid 'type' field for prompt %q (must be a string)", name) + } + } + dec, err := util.NewStrictDecoder(r) + if err != nil { + return nil, fmt.Errorf("error creating decoder: %s", err) } - for name, promptList := range raw { - (*c)[name] = prompts.PromptsetConfig{Name: name, PromptNames: promptList} + // Use the central registry to decode the prompt based on its type. 
+ promptCfg, err := prompts.DecodeConfig(ctx, resourceType, name, dec) + if err != nil { + return nil, err } - return nil + return promptCfg, nil } // Tools naming validation is added in the MCP v2025-11-25, but we'll be diff --git a/internal/server/mcp.go b/internal/server/mcp.go index aecd2454f2..65ace06d66 100644 --- a/internal/server/mcp.go +++ b/internal/server/mcp.go @@ -23,7 +23,6 @@ import ( "fmt" "io" "net/http" - "strings" "sync" "time" @@ -37,9 +36,11 @@ import ( v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105" v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326" "github.com/googleapis/genai-toolbox/internal/util" + "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" ) type sseSession struct { @@ -117,6 +118,55 @@ type stdioSession struct { writer io.Writer } +// traceContextCarrier implements propagation.TextMapCarrier for extracting trace context from _meta +type traceContextCarrier map[string]string + +func (c traceContextCarrier) Get(key string) string { + return c[key] +} + +func (c traceContextCarrier) Set(key, value string) { + c[key] = value +} + +func (c traceContextCarrier) Keys() []string { + keys := make([]string, 0, len(c)) + for k := range c { + keys = append(keys, k) + } + return keys +} + +// extractTraceContext extracts W3C Trace Context from params._meta +func extractTraceContext(ctx context.Context, body []byte) context.Context { + // Try to parse the request to extract _meta + var req struct { + Params struct { + Meta struct { + Traceparent string `json:"traceparent,omitempty"` + Tracestate string `json:"tracestate,omitempty"` + } `json:"_meta,omitempty"` + } `json:"params,omitempty"` + } + + if err := json.Unmarshal(body, &req); err != nil { + return ctx + } + + // If traceparent is present, extract the context + if req.Params.Meta.Traceparent != "" { + carrier := 
traceContextCarrier{ + "traceparent": req.Params.Meta.Traceparent, + } + if req.Params.Meta.Tracestate != "" { + carrier["tracestate"] = req.Params.Meta.Tracestate + } + return otel.GetTextMapPropagator().Extract(ctx, carrier) + } + + return ctx +} + func NewStdioSession(s *Server, stdin io.Reader, stdout io.Writer) *stdioSession { stdioSession := &stdioSession{ server: s, @@ -143,18 +193,29 @@ func (s *stdioSession) readInputStream(ctx context.Context) error { } return err } - v, res, err := processMcpMessage(ctx, []byte(line), s.server, s.protocol, "", "", nil) + // This ensures the transport span becomes a child of the client span + msgCtx := extractTraceContext(ctx, []byte(line)) + + // Create span for STDIO transport + msgCtx, span := s.server.instrumentation.Tracer.Start(msgCtx, "toolbox/server/mcp/stdio", + trace.WithSpanKind(trace.SpanKindServer), + ) + defer span.End() + + v, res, err := processMcpMessage(msgCtx, []byte(line), s.server, s.protocol, "", "", nil, "") if err != nil { // errors during the processing of message will generate a valid MCP Error response. // server can continue to run. - s.server.logger.ErrorContext(ctx, err.Error()) + s.server.logger.ErrorContext(msgCtx, err.Error()) + span.SetStatus(codes.Error, err.Error()) } + if v != "" { s.protocol = v } // no responses for notifications if res != nil { - if err = s.write(ctx, res); err != nil { + if err = s.write(msgCtx, res); err != nil { return err } } @@ -240,7 +301,9 @@ func mcpRouter(s *Server) (chi.Router, error) { // sseHandler handles sse initialization and message. 
func sseHandler(s *Server, w http.ResponseWriter, r *http.Request) { - ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp/sse") + ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp/sse", + trace.WithSpanKind(trace.SpanKindServer), + ) r = r.WithContext(ctx) sessionId := uuid.New().String() @@ -336,9 +399,27 @@ func methodNotAllowed(s *Server, w http.ResponseWriter, r *http.Request) { func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp") + ctx := r.Context() + ctx = util.WithLogger(ctx, s.logger) + + // Read body first so we can extract trace context + body, err := io.ReadAll(r.Body) + if err != nil { + // Generate a new uuid if unable to decode + id := uuid.New().String() + s.logger.DebugContext(ctx, err.Error()) + render.JSON(w, r, jsonrpc.NewError(id, jsonrpc.PARSE_ERROR, err.Error(), nil)) + return + } + + // This ensures the transport span becomes a child of the client span + ctx = extractTraceContext(ctx, body) + + // Create span for HTTP transport + ctx, span := s.instrumentation.Tracer.Start(ctx, "toolbox/server/mcp/http", + trace.WithSpanKind(trace.SpanKindServer), + ) r = r.WithContext(ctx) - ctx = util.WithLogger(r.Context(), s.logger) var sessionId, protocolVersion string var session *sseSession @@ -380,7 +461,6 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) { s.logger.DebugContext(ctx, fmt.Sprintf("toolset name: %s", toolsetName)) span.SetAttributes(attribute.String("toolset_name", toolsetName)) - var err error defer func() { if err != nil { span.SetStatus(codes.Error, err.Error()) @@ -399,17 +479,9 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) { ) }() - // Read and returns a body from io.Reader - body, err := io.ReadAll(r.Body) - if err != nil { - // Generate a new uuid if unable to decode - id := 
uuid.New().String() - s.logger.DebugContext(ctx, err.Error()) - render.JSON(w, r, jsonrpc.NewError(id, jsonrpc.PARSE_ERROR, err.Error(), nil)) - return - } + networkProtocolVersion := fmt.Sprintf("%d.%d", r.ProtoMajor, r.ProtoMinor) - v, res, err := processMcpMessage(ctx, body, s, protocolVersion, toolsetName, promptsetName, r.Header) + v, res, err := processMcpMessage(ctx, body, s, protocolVersion, toolsetName, promptsetName, r.Header, networkProtocolVersion) if err != nil { s.logger.DebugContext(ctx, fmt.Errorf("error processing message: %w", err).Error()) } @@ -444,15 +516,12 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) { code := rpcResponse.Error.Code switch code { case jsonrpc.INTERNAL_ERROR: + // Map Internal RPC Error (-32603) to HTTP 500 w.WriteHeader(http.StatusInternalServerError) case jsonrpc.INVALID_REQUEST: - errStr := err.Error() - if errors.Is(err, util.ErrUnauthorized) { - w.WriteHeader(http.StatusUnauthorized) - } else if strings.Contains(errStr, "Error 401") { - w.WriteHeader(http.StatusUnauthorized) - } else if strings.Contains(errStr, "Error 403") { - w.WriteHeader(http.StatusForbidden) + var clientServerErr *util.ClientServerError + if errors.As(err, &clientServerErr) { + w.WriteHeader(clientServerErr.Code) } } } @@ -462,7 +531,7 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) { } // processMcpMessage process the messages received from clients -func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVersion string, toolsetName string, promptsetName string, header http.Header) (string, any, error) { +func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVersion string, toolsetName string, promptsetName string, header http.Header, networkProtocolVersion string) (string, any, error) { logger, err := util.LoggerFromContext(ctx) if err != nil { return "", jsonrpc.NewError("", jsonrpc.INTERNAL_ERROR, err.Error(), nil), err @@ -498,31 +567,95 @@ func 
processMcpMessage(ctx, body []byte, s *Server, protocolVers return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } + // Create method-specific span with semantic conventions + // Note: Trace context is already extracted and set in ctx by the caller + ctx, span := s.instrumentation.Tracer.Start(ctx, baseMessage.Method, + trace.WithSpanKind(trace.SpanKindServer), + ) + defer span.End() + + // Determine network transport and protocol based on header presence + networkTransport := "pipe" // default for stdio + networkProtocolName := "stdio" + if header != nil { + networkTransport = "tcp" // HTTP/SSE transport + networkProtocolName = "http" + } + + // Set required semantic attributes for span according to OTEL MCP semconv + // ref: https://opentelemetry.io/docs/specs/semconv/gen-ai/mcp/#server + span.SetAttributes( + attribute.String("mcp.method.name", baseMessage.Method), + attribute.String("network.transport", networkTransport), + attribute.String("network.protocol.name", networkProtocolName), + ) + + // Set network protocol version if available + if networkProtocolVersion != "" { + span.SetAttributes(attribute.String("network.protocol.version", networkProtocolVersion)) + } + + // Set MCP protocol version if available + if protocolVersion != "" { + span.SetAttributes(attribute.String("mcp.protocol.version", protocolVersion)) + } + + // Set request ID + if baseMessage.Id != nil { + span.SetAttributes(attribute.String("jsonrpc.request.id", fmt.Sprintf("%v", baseMessage.Id))) + } + + // Set toolset name + span.SetAttributes(attribute.String("toolset.name", toolsetName)) + // Check if message is a notification if baseMessage.Id == nil { err := mcp.NotificationHandler(ctx, body) + if err != nil { + span.SetStatus(codes.Error, err.Error()) + } return "", nil, err } + // Process the method switch baseMessage.Method { case mcputil.INITIALIZE: - res, v, err := mcp.InitializeResponse(ctx, baseMessage.Id, body, s.version) + 
result, version, err := mcp.InitializeResponse(ctx, baseMessage.Id, body, s.version) if err != nil { - return "", res, err + span.SetStatus(codes.Error, err.Error()) + if rpcErr, ok := result.(jsonrpc.JSONRPCError); ok { + span.SetAttributes(attribute.String("error.type", rpcErr.Error.String())) + } + return "", result, err } - return v, res, err + span.SetAttributes(attribute.String("mcp.protocol.version", version)) + return version, result, err default: toolset, ok := s.ResourceMgr.GetToolset(toolsetName) if !ok { - err = fmt.Errorf("toolset does not exist") - return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + err := fmt.Errorf("toolset does not exist") + rpcErr := jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil) + span.SetStatus(codes.Error, err.Error()) + span.SetAttributes(attribute.String("error.type", rpcErr.Error.String())) + return "", rpcErr, err } promptset, ok := s.ResourceMgr.GetPromptset(promptsetName) if !ok { - err = fmt.Errorf("promptset does not exist") - return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + err := fmt.Errorf("promptset does not exist") + rpcErr := jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil) + span.SetStatus(codes.Error, err.Error()) + span.SetAttributes(attribute.String("error.type", rpcErr.Error.String())) + return "", rpcErr, err } - res, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, promptset, s.ResourceMgr, body, header) - return "", res, err + result, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, promptset, s.ResourceMgr, body, header) + if err != nil { + span.SetStatus(codes.Error, err.Error()) + // Set error.type based on JSON-RPC error code + if rpcErr, ok := result.(jsonrpc.JSONRPCError); ok { + span.SetAttributes(attribute.Int("jsonrpc.error.code", rpcErr.Error.Code)) + 
span.SetAttributes(attribute.String("error.type", rpcErr.Error.String())) + } + } + return "", result, err } } diff --git a/internal/server/mcp/jsonrpc/jsonrpc.go b/internal/server/mcp/jsonrpc/jsonrpc.go index 7099ea8a63..8a4aaaf15b 100644 --- a/internal/server/mcp/jsonrpc/jsonrpc.go +++ b/internal/server/mcp/jsonrpc/jsonrpc.go @@ -45,6 +45,9 @@ type Request struct { // notifications. The receiver is not obligated to provide these // notifications. ProgressToken ProgressToken `json:"progressToken,omitempty"` + // W3C Trace Context fields for distributed tracing + Traceparent string `json:"traceparent,omitempty"` + Tracestate string `json:"tracestate,omitempty"` } `json:"_meta,omitempty"` } `json:"params,omitempty"` } @@ -97,6 +100,24 @@ type Error struct { Data interface{} `json:"data,omitempty"` } +// String returns the error type as a string based on the error code. +func (e Error) String() string { + switch e.Code { + case METHOD_NOT_FOUND: + return "method_not_found" + case INVALID_PARAMS: + return "invalid_params" + case INTERNAL_ERROR: + return "internal_error" + case PARSE_ERROR: + return "parse_error" + case INVALID_REQUEST: + return "invalid_request" + default: + return "jsonrpc_error" + } +} + // JSONRPCError represents a non-successful (error) response to a request. 
type JSONRPCError struct { Jsonrpc string `json:"jsonrpc"` diff --git a/internal/server/mcp/v20241105/method.go b/internal/server/mcp/v20241105/method.go index d34d0074a4..4684f4687c 100644 --- a/internal/server/mcp/v20241105/method.go +++ b/internal/server/mcp/v20241105/method.go @@ -21,13 +21,15 @@ import ( "errors" "fmt" "net/http" - "strings" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc" "github.com/googleapis/genai-toolbox/internal/server/resources" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) // ProcessMethod returns a response for the request. @@ -101,6 +103,14 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re toolName := req.Params.Name toolArgument := req.Params.Arguments logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName)) + span.SetAttributes( + attribute.String("gen_ai.tool.name", toolName), + attribute.String("gen_ai.operation.name", "execute_tool"), + ) tool, ok := resourceMgr.GetTool(toolName) if !ok { err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName) @@ -123,7 +133,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re } if clientAuth { if accessToken == "" { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized + err := util.NewClientServerError( + "missing access token in the 'Authorization' header", + http.StatusUnauthorized, + nil, + ) + return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } } @@ -171,12 +186,16 @@ func 
toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // Check if any of the specified auth services is verified isAuthorized := tool.Authorized(verifiedAuthServices) if !isAuthorized { - err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized) + err = util.NewClientServerError( + "unauthorized Tool call: Please make sure you specify correct auth headers", + http.StatusUnauthorized, + nil, + ) return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } logger.DebugContext(ctx, "tool invocation authorized") - params, err := tool.ParseParams(data, claimsFromAuth) + params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) if err != nil { err = fmt.Errorf("provided parameters were invalid: %w", err) return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err @@ -193,30 +212,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // run tool invocation and generate response. results, err := tool.Invoke(ctx, resourceMgr, params, accessToken) if err != nil { - errStr := err.Error() - // Missing authService tokens. 
- if errors.Is(err, util.ErrUnauthorized) { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err - } - // Upstream auth error - if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") { - if clientAuth { - // Error with client credentials should pass down to the client - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + var tbErr util.ToolboxError + + if errors.As(err, &tbErr) { + switch tbErr.Category() { + case util.CategoryAgent: + // MCP - Tool execution error + // Return SUCCESS but with IsError: true + text := TextContent{ + Type: "text", + Text: err.Error(), + } + return jsonrpc.JSONRPCResponse{ + Jsonrpc: jsonrpc.JSONRPC_VERSION, + Id: id, + Result: CallToolResult{Content: []TextContent{text}, IsError: true}, + }, nil + + case util.CategoryServer: + // MCP Spec - Protocol error + // Return JSON-RPC ERROR + var clientServerErr *util.ClientServerError + rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603) + + if errors.As(err, &clientServerErr) { + if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden { + if clientAuth { + rpcCode = jsonrpc.INVALID_REQUEST + } else { + rpcCode = jsonrpc.INTERNAL_ERROR + } + } + } + return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err } - // Auth error with ADC should raise internal 500 error + } else { + // Unknown error -> 500 return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err } - - text := TextContent{ - Type: "text", - Text: err.Error(), - } - return jsonrpc.JSONRPCResponse{ - Jsonrpc: jsonrpc.JSONRPC_VERSION, - Id: id, - Result: CallToolResult{Content: []TextContent{text}, IsError: true}, - }, nil } content := make([]TextContent, 0) @@ -287,6 +320,11 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r promptName := req.Params.Name logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName)) + + // Update span 
name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName)) + span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName)) prompt, ok := resourceMgr.GetPrompt(promptName) if !ok { err := fmt.Errorf("prompt with name %q does not exist", promptName) diff --git a/internal/server/mcp/v20250326/method.go b/internal/server/mcp/v20250326/method.go index 86aa5d9e0b..24c61fd617 100644 --- a/internal/server/mcp/v20250326/method.go +++ b/internal/server/mcp/v20250326/method.go @@ -21,13 +21,15 @@ import ( "errors" "fmt" "net/http" - "strings" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc" "github.com/googleapis/genai-toolbox/internal/server/resources" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) // ProcessMethod returns a response for the request. 
@@ -101,6 +103,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re toolName := req.Params.Name toolArgument := req.Params.Arguments logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName)) + span.SetAttributes( + attribute.String("gen_ai.tool.name", toolName), + attribute.String("gen_ai.operation.name", "execute_tool"), + ) + tool, ok := resourceMgr.GetTool(toolName) if !ok { err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName) @@ -123,7 +134,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re } if clientAuth { if accessToken == "" { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized + err := util.NewClientServerError( + "missing access token in the 'Authorization' header", + http.StatusUnauthorized, + nil, + ) + return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } } @@ -171,12 +187,16 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // Check if any of the specified auth services is verified isAuthorized := tool.Authorized(verifiedAuthServices) if !isAuthorized { - err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized) + err = util.NewClientServerError( + "unauthorized Tool call: Please make sure you specify correct auth headers", + http.StatusUnauthorized, + nil, + ) return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } logger.DebugContext(ctx, "tool invocation authorized") - params, err := tool.ParseParams(data, claimsFromAuth) + params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) if err != nil { err = fmt.Errorf("provided parameters were invalid: %w", 
err) return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err @@ -193,31 +213,45 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // run tool invocation and generate response. results, err := tool.Invoke(ctx, resourceMgr, params, accessToken) if err != nil { - errStr := err.Error() - // Missing authService tokens. - if errors.Is(err, util.ErrUnauthorized) { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err - } - // Upstream auth error - if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") { - if clientAuth { - // Error with client credentials should pass down to the client - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + var tbErr util.ToolboxError + + if errors.As(err, &tbErr) { + switch tbErr.Category() { + case util.CategoryAgent: + // MCP - Tool execution error + // Return SUCCESS but with IsError: true + text := TextContent{ + Type: "text", + Text: err.Error(), + } + return jsonrpc.JSONRPCResponse{ + Jsonrpc: jsonrpc.JSONRPC_VERSION, + Id: id, + Result: CallToolResult{Content: []TextContent{text}, IsError: true}, + }, nil + + case util.CategoryServer: + // MCP Spec - Protocol error + // Return JSON-RPC ERROR + var clientServerErr *util.ClientServerError + rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603) + + if errors.As(err, &clientServerErr) { + if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden { + if clientAuth { + rpcCode = jsonrpc.INVALID_REQUEST + } else { + rpcCode = jsonrpc.INTERNAL_ERROR + } + } + } + return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err } - // Auth error with ADC should raise internal 500 error + } else { + // Unknown error -> 500 return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err } - text := TextContent{ - Type: "text", - Text: err.Error(), - } - return jsonrpc.JSONRPCResponse{ - Jsonrpc: 
jsonrpc.JSONRPC_VERSION, - Id: id, - Result: CallToolResult{Content: []TextContent{text}, IsError: true}, - }, nil } - content := make([]TextContent, 0) sliceRes, ok := results.([]any) @@ -286,6 +320,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r promptName := req.Params.Name logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName)) + span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName)) + prompt, ok := resourceMgr.GetPrompt(promptName) if !ok { err := fmt.Errorf("prompt with name %q does not exist", promptName) diff --git a/internal/server/mcp/v20250618/method.go b/internal/server/mcp/v20250618/method.go index f8746d9d9d..b6cb45059b 100644 --- a/internal/server/mcp/v20250618/method.go +++ b/internal/server/mcp/v20250618/method.go @@ -21,13 +21,15 @@ import ( "errors" "fmt" "net/http" - "strings" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc" "github.com/googleapis/genai-toolbox/internal/server/resources" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) // ProcessMethod returns a response for the request. 
@@ -94,6 +96,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re toolName := req.Params.Name toolArgument := req.Params.Arguments logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName)) + span.SetAttributes( + attribute.String("gen_ai.tool.name", toolName), + attribute.String("gen_ai.operation.name", "execute_tool"), + ) + tool, ok := resourceMgr.GetTool(toolName) if !ok { err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName) @@ -116,7 +127,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re } if clientAuth { if accessToken == "" { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized + err := util.NewClientServerError( + "missing access token in the 'Authorization' header", + http.StatusUnauthorized, + nil, + ) + return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } } @@ -164,12 +180,16 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // Check if any of the specified auth services is verified isAuthorized := tool.Authorized(verifiedAuthServices) if !isAuthorized { - err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized) + err = util.NewClientServerError( + "unauthorized Tool call: Please make sure you specify correct auth headers", + http.StatusUnauthorized, + nil, + ) return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } logger.DebugContext(ctx, "tool invocation authorized") - params, err := tool.ParseParams(data, claimsFromAuth) + params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) if err != nil { err = fmt.Errorf("provided parameters were invalid: %w", err) 
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err @@ -186,29 +206,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // run tool invocation and generate response. results, err := tool.Invoke(ctx, resourceMgr, params, accessToken) if err != nil { - errStr := err.Error() - // Missing authService tokens. - if errors.Is(err, util.ErrUnauthorized) { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err - } - // Upstream auth error - if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") { - if clientAuth { - // Error with client credentials should pass down to the client - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + var tbErr util.ToolboxError + + if errors.As(err, &tbErr) { + switch tbErr.Category() { + case util.CategoryAgent: + // MCP - Tool execution error + // Return SUCCESS but with IsError: true + text := TextContent{ + Type: "text", + Text: err.Error(), + } + return jsonrpc.JSONRPCResponse{ + Jsonrpc: jsonrpc.JSONRPC_VERSION, + Id: id, + Result: CallToolResult{Content: []TextContent{text}, IsError: true}, + }, nil + + case util.CategoryServer: + // MCP Spec - Protocol error + // Return JSON-RPC ERROR + var clientServerErr *util.ClientServerError + rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603) + + if errors.As(err, &clientServerErr) { + if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden { + if clientAuth { + rpcCode = jsonrpc.INVALID_REQUEST + } else { + rpcCode = jsonrpc.INTERNAL_ERROR + } + } + } + return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err } - // Auth error with ADC should raise internal 500 error + } else { + // Unknown error -> 500 return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err } - text := TextContent{ - Type: "text", - Text: err.Error(), - } - return jsonrpc.JSONRPCResponse{ - Jsonrpc: 
jsonrpc.JSONRPC_VERSION, - Id: id, - Result: CallToolResult{Content: []TextContent{text}, IsError: true}, - }, nil } content := make([]TextContent, 0) @@ -279,6 +314,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r promptName := req.Params.Name logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName)) + span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName)) + prompt, ok := resourceMgr.GetPrompt(promptName) if !ok { err := fmt.Errorf("prompt with name %q does not exist", promptName) diff --git a/internal/server/mcp/v20251125/method.go b/internal/server/mcp/v20251125/method.go index f67bfb5468..2d59554c55 100644 --- a/internal/server/mcp/v20251125/method.go +++ b/internal/server/mcp/v20251125/method.go @@ -21,13 +21,15 @@ import ( "errors" "fmt" "net/http" - "strings" "github.com/googleapis/genai-toolbox/internal/prompts" "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc" "github.com/googleapis/genai-toolbox/internal/server/resources" "github.com/googleapis/genai-toolbox/internal/tools" "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) // ProcessMethod returns a response for the request. 
@@ -94,6 +96,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re toolName := req.Params.Name toolArgument := req.Params.Arguments logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName)) + span.SetAttributes( + attribute.String("gen_ai.tool.name", toolName), + attribute.String("gen_ai.operation.name", "execute_tool"), + ) + tool, ok := resourceMgr.GetTool(toolName) if !ok { err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName) @@ -116,7 +127,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re } if clientAuth { if accessToken == "" { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized + err := util.NewClientServerError( + "missing access token in the 'Authorization' header", + http.StatusUnauthorized, + nil, + ) + return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } } @@ -164,12 +180,16 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // Check if any of the specified auth services is verified isAuthorized := tool.Authorized(verifiedAuthServices) if !isAuthorized { - err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized) + err = util.NewClientServerError( + "unauthorized Tool call: Please make sure you specify correct auth headers", + http.StatusUnauthorized, + nil, + ) return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err } logger.DebugContext(ctx, "tool invocation authorized") - params, err := tool.ParseParams(data, claimsFromAuth) + params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) if err != nil { err = fmt.Errorf("provided parameters were invalid: %w", err) 
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err @@ -186,29 +206,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re // run tool invocation and generate response. results, err := tool.Invoke(ctx, resourceMgr, params, accessToken) if err != nil { - errStr := err.Error() - // Missing authService tokens. - if errors.Is(err, util.ErrUnauthorized) { - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err - } - // Upstream auth error - if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") { - if clientAuth { - // Error with client credentials should pass down to the client - return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err + var tbErr util.ToolboxError + + if errors.As(err, &tbErr) { + switch tbErr.Category() { + case util.CategoryAgent: + // MCP - Tool execution error + // Return SUCCESS but with IsError: true + text := TextContent{ + Type: "text", + Text: err.Error(), + } + return jsonrpc.JSONRPCResponse{ + Jsonrpc: jsonrpc.JSONRPC_VERSION, + Id: id, + Result: CallToolResult{Content: []TextContent{text}, IsError: true}, + }, nil + + case util.CategoryServer: + // MCP Spec - Protocol error + // Return JSON-RPC ERROR + var clientServerErr *util.ClientServerError + rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603) + + if errors.As(err, &clientServerErr) { + if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden { + if clientAuth { + rpcCode = jsonrpc.INVALID_REQUEST + } else { + rpcCode = jsonrpc.INTERNAL_ERROR + } + } + } + return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err } - // Auth error with ADC should raise internal 500 error + } else { + // Unknown error -> 500 return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err } - text := TextContent{ - Type: "text", - Text: err.Error(), - } - return jsonrpc.JSONRPCResponse{ - Jsonrpc: 
jsonrpc.JSONRPC_VERSION, - Id: id, - Result: CallToolResult{Content: []TextContent{text}, IsError: true}, - }, nil } content := make([]TextContent, 0) @@ -279,6 +314,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r promptName := req.Params.Name logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName)) + + // Update span name and set gen_ai attributes + span := trace.SpanFromContext(ctx) + span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName)) + span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName)) + prompt, ok := resourceMgr.GetPrompt(promptName) if !ok { err := fmt.Errorf("prompt with name %q does not exist", promptName) diff --git a/internal/server/mcp_test.go b/internal/server/mcp_test.go index 0d50af2b24..bbfce7ad41 100644 --- a/internal/server/mcp_test.go +++ b/internal/server/mcp_test.go @@ -231,7 +231,7 @@ func TestMcpEndpointWithoutInitialized(t *testing.T) { "id": "tools-call-tool4", "error": map[string]any{ "code": -32600.0, - "message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized", + "message": "unauthorized Tool call: Please make sure you specify correct auth headers", }, }, }, @@ -320,7 +320,7 @@ func TestMcpEndpointWithoutInitialized(t *testing.T) { Params: map[string]any{ "name": "prompt2", "arguments": map[string]any{ - "arg1": 42, // prompt2 expects a string, we send a number + "arg1": 42, }, }, }, @@ -834,7 +834,7 @@ func TestMcpEndpoint(t *testing.T) { "id": "tools-call-tool4", "error": map[string]any{ "code": -32600.0, - "message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized", + "message": "unauthorized Tool call: Please make sure you specify correct auth headers", }, }, }, diff --git a/internal/server/mocks.go b/internal/server/mocks.go new file mode 100644 index 0000000000..56e458110b --- /dev/null +++ b/internal/server/mocks.go @@ -0,0 +1,160 @@ +// Copyright 2026 Google LLC +// +// 
Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + "fmt" + + "github.com/googleapis/genai-toolbox/internal/embeddingmodels" + "github.com/googleapis/genai-toolbox/internal/prompts" + "github.com/googleapis/genai-toolbox/internal/tools" + "github.com/googleapis/genai-toolbox/internal/util" + "github.com/googleapis/genai-toolbox/internal/util/parameters" +) + +// MockTool is used to mock tools in tests +type MockTool struct { + Name string + Description string + Params []parameters.Parameter + manifest tools.Manifest + unauthorized bool + requiresClientAuthrorization bool +} + +func (t MockTool) Invoke(context.Context, tools.SourceProvider, parameters.ParamValues, tools.AccessToken) (any, util.ToolboxError) { + mock := []any{t.Name} + return mock, nil +} + +func (t MockTool) ToConfig() tools.ToolConfig { + return nil +} + +// claims is a map of user info decoded from an auth token +func (t MockTool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) { + return parameters.ParseParams(t.Params, data, claimsMap) +} + +func (t MockTool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) { + return parameters.EmbedParams(ctx, t.Params, paramValues, embeddingModelsMap, nil) +} + +func (t MockTool) Manifest() tools.Manifest { + pMs := make([]parameters.ParameterManifest, 0, 
len(t.Params)) + for _, p := range t.Params { + pMs = append(pMs, p.Manifest()) + } + return tools.Manifest{Description: t.Description, Parameters: pMs} +} + +func (t MockTool) Authorized(verifiedAuthServices []string) bool { + // defaulted to true + return !t.unauthorized +} + +func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error) { + // defaulted to false + return t.requiresClientAuthrorization, nil +} + +func (t MockTool) GetParameters() parameters.Parameters { + return t.Params +} + +func (t MockTool) McpManifest() tools.McpManifest { + properties := make(map[string]parameters.ParameterMcpManifest) + required := make([]string, 0) + authParams := make(map[string][]string) + + for _, p := range t.Params { + name := p.GetName() + paramManifest, authParamList := p.McpManifest() + properties[name] = paramManifest + required = append(required, name) + + if len(authParamList) > 0 { + authParams[name] = authParamList + } + } + + toolsSchema := parameters.McpToolsSchema{ + Type: "object", + Properties: properties, + Required: required, + } + + mcpManifest := tools.McpManifest{ + Name: t.Name, + Description: t.Description, + InputSchema: toolsSchema, + } + + if len(authParams) > 0 { + mcpManifest.Metadata = map[string]any{ + "toolbox/authParams": authParams, + } + } + + return mcpManifest +} + +func (t MockTool) GetAuthTokenHeaderName(tools.SourceProvider) (string, error) { + return "Authorization", nil +} + +// MockPrompt is used to mock prompts in tests +type MockPrompt struct { + Name string + Description string + Args prompts.Arguments +} + +func (p MockPrompt) SubstituteParams(vals parameters.ParamValues) (any, error) { + return []prompts.Message{ + { + Role: "user", + Content: fmt.Sprintf("substituted %s", p.Name), + }, + }, nil +} + +func (p MockPrompt) ParseArgs(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) { + var params parameters.Parameters + for _, arg := range p.Args { + params = 
append(params, arg.Parameter) + } + return parameters.ParseParams(params, data, claimsMap) +} + +func (p MockPrompt) Manifest() prompts.Manifest { + var argManifests []parameters.ParameterManifest + for _, arg := range p.Args { + argManifests = append(argManifests, arg.Manifest()) + } + return prompts.Manifest{ + Description: p.Description, + Arguments: argManifests, + } +} + +func (p MockPrompt) McpManifest() prompts.McpManifest { + return prompts.GetMcpManifest(p.Name, p.Description, p.Args) +} + +func (p MockPrompt) ToConfig() prompts.PromptConfig { + return nil +} diff --git a/internal/server/resources/resources_test.go b/internal/server/resources/resources_test.go index ad4bfdd326..e0682a27a9 100644 --- a/internal/server/resources/resources_test.go +++ b/internal/server/resources/resources_test.go @@ -32,7 +32,7 @@ func TestUpdateServer(t *testing.T) { "example-source": &alloydbpg.Source{ Config: alloydbpg.Config{ Name: "example-alloydb-source", - Kind: "alloydb-postgres", + Type: "alloydb-postgres", }, }, } @@ -92,7 +92,7 @@ func TestUpdateServer(t *testing.T) { "example-source2": &alloydbpg.Source{ Config: alloydbpg.Config{ Name: "example-alloydb-source2", - Kind: "alloydb-postgres", + Type: "alloydb-postgres", }, }, } diff --git a/internal/server/server.go b/internal/server/server.go index 961ffc48a3..f77c504113 100644 --- a/internal/server/server.go +++ b/internal/server/server.go @@ -86,7 +86,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) ( childCtx, span := instrumentation.Tracer.Start( ctx, "toolbox/server/source/init", - trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())), + trace.WithAttributes(attribute.String("source_type", sc.SourceConfigType())), trace.WithAttributes(attribute.String("source_name", name)), ) defer span.End() @@ -114,7 +114,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) ( _, span := instrumentation.Tracer.Start( ctx, "toolbox/server/auth/init", - 
trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())), + trace.WithAttributes(attribute.String("auth_type", sc.AuthServiceConfigType())), trace.WithAttributes(attribute.String("auth_name", name)), ) defer span.End() @@ -142,7 +142,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) ( _, span := instrumentation.Tracer.Start( ctx, "toolbox/server/embeddingmodel/init", - trace.WithAttributes(attribute.String("model_kind", ec.EmbeddingModelConfigKind())), + trace.WithAttributes(attribute.String("model_type", ec.EmbeddingModelConfigType())), trace.WithAttributes(attribute.String("model_name", name)), ) defer span.End() @@ -170,7 +170,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) ( _, span := instrumentation.Tracer.Start( ctx, "toolbox/server/tool/init", - trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())), + trace.WithAttributes(attribute.String("tool_type", tc.ToolConfigType())), trace.WithAttributes(attribute.String("tool_name", name)), ) defer span.End() @@ -239,7 +239,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) ( _, span := instrumentation.Tracer.Start( ctx, "toolbox/server/prompt/init", - trace.WithAttributes(attribute.String("prompt_kind", pc.PromptConfigKind())), + trace.WithAttributes(attribute.String("prompt_type", pc.PromptConfigType())), trace.WithAttributes(attribute.String("prompt_name", name)), ) defer span.End() diff --git a/internal/server/server_test.go b/internal/server/server_test.go index c13df83be4..ab809fc579 100644 --- a/internal/server/server_test.go +++ b/internal/server/server_test.go @@ -141,7 +141,7 @@ func TestUpdateServer(t *testing.T) { "example-source": &alloydbpg.Source{ Config: alloydbpg.Config{ Name: "example-alloydb-source", - Kind: "alloydb-postgres", + Type: "alloydb-postgres", }, }, } diff --git a/internal/server/static/js/auth.js b/internal/server/static/js/auth.js index 25ff16c736..c805617c48 100644 --- 
a/internal/server/static/js/auth.js +++ b/internal/server/static/js/auth.js @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +import { escapeHtml } from './sanitize.js'; + /** * Renders the Google Sign-In button using the GIS library. * @param {string} toolId The ID of the tool. @@ -112,13 +114,14 @@ function handleCredentialResponse(response, toolId, authProfileName) { // creates the Google Auth method dropdown export function createGoogleAuthMethodItem(toolId, authProfileName) { + const safeProfileName = escapeHtml(authProfileName); const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`; const item = document.createElement('div'); item.className = 'auth-method-item'; item.innerHTML = `
- Google ID Token (${authProfileName}) + Google ID Token (${safeProfileName})