Compare commits

..

107 Commits

Author SHA1 Message Date
Harsh Jha
10532bebc2 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 17:39:36 +05:30
Harsh Jha
d93a23b35a chore: fix auth 2025-09-09 17:39:05 +05:30
Harsh Jha
c323dc4618 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 17:29:43 +05:30
Harsh Jha
feadcfbef0 chore: added pkg of wget 2025-09-09 17:29:16 +05:30
Harsh Jha
e06e802f5e Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 17:25:07 +05:30
Harsh Jha
5b7bcf0715 chore: fix wget cmd 2025-09-09 17:24:33 +05:30
Harsh Jha
71dd32de60 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 17:21:32 +05:30
Harsh Jha
5615f4636d chore: fix of cloudsql 2025-09-09 17:21:08 +05:30
Harsh Jha
68a85b80e7 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 17:11:09 +05:30
Harsh Jha
b44bfec6cc chore: fix toolbox instance 2025-09-09 17:10:43 +05:30
Harsh Jha
55635cedf6 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 16:47:17 +05:30
Harsh Jha
a9b84d6165 chore: fix toolbox url 2025-09-09 16:46:49 +05:30
Harsh Jha
c23c5c9e8f Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 12:11:51 +05:30
Harsh Jha
beb524de67 chore: fix variable name of version of toolbox 2025-09-09 12:11:24 +05:30
Harsh Jha
c3257fe912 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 12:07:19 +05:30
Harsh Jha
4fa243063c removed log of secret value 2025-09-09 12:06:56 +05:30
Harsh Jha
7b69829a4b Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 11:59:18 +05:30
Harsh Jha
ea9e4f323a chore: removed cloudsql steps 2025-09-09 11:58:49 +05:30
Harsh Jha
aded6630b6 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-09 11:42:36 +05:30
Harsh Jha
674e98875a fix: removed extra steps 2025-09-09 11:36:36 +05:30
Harsh Jha
82e901b193 chore: added license for quickstart_py.yaml 2025-09-08 11:57:59 +05:30
Harsh Jha
6f43a283fa added os package in langchain 2025-09-08 11:44:08 +05:30
Harsh Jha
51f6c4a946 fix: version for llamaindex packages 2025-09-08 11:36:14 +05:30
Harsh Jha
577e094330 fix: version of langchain packages 2025-09-08 11:23:59 +05:30
Harsh Jha
3399f4ed61 updated version 2025-09-04 18:59:08 +05:30
Harsh Jha
bed5ce5d71 updated 2025-09-04 18:48:38 +05:30
Harsh Jha
6db2fcbcfa updated 2025-09-04 18:33:48 +05:30
Harsh Jha
05bb6820e2 Merge branch 'main' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 18:33:06 +05:30
Harsh Jha
b754ce9e34 updated 2025-09-04 18:18:03 +05:30
Harsh Jha
2a9138dd01 updated 2025-09-04 18:11:57 +05:30
Harsh Jha
a366f99524 quickstart update 2025-09-04 17:39:07 +05:30
Harsh Jha
37eb54d693 updated 2025-09-04 17:32:45 +05:30
Harsh Jha
8e5908251e update 2025-09-04 17:23:39 +05:30
Harsh Jha
fd814cdf27 update 2025-09-04 17:14:42 +05:30
Harsh Jha
7df5d8975c updated quickstart 2025-09-04 17:06:38 +05:30
Harsh Jha
d09dd1ac76 updated quickstart.py 2025-09-04 17:02:30 +05:30
Harsh Jha
4e022ef3ef updated 2025-09-04 16:46:40 +05:30
Harsh Jha
2b9710ffef quickstart.py updated 2025-09-04 16:39:37 +05:30
Harsh Jha
653a9f0f46 modified for quickstart.py for toolbox url 2025-09-04 16:30:43 +05:30
Harsh Jha
8df929fcd4 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 13:47:34 +05:30
Harsh Jha
7280065c23 updated 2025-09-04 13:47:05 +05:30
Harsh Jha
ce83384393 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 13:37:09 +05:30
Harsh Jha
34c4edf1ae debug for pg password 2025-09-04 13:36:40 +05:30
Harsh Jha
6ae1039f4a Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 13:25:33 +05:30
Harsh Jha
d85ae4fb78 updated 2025-09-04 13:25:07 +05:30
Harsh Jha
8f77e081b5 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 13:11:02 +05:30
Harsh Jha
a5bc7d8110 updated 2025-09-04 13:10:33 +05:30
Harsh Jha
3a1814f9e6 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 13:03:12 +05:30
Harsh Jha
9e45500df6 updated 2025-09-04 13:02:44 +05:30
Harsh Jha
b80a4d8612 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:53:37 +05:30
Harsh Jha
69fcd51966 updated 2025-09-04 12:52:58 +05:30
Harsh Jha
e7708ef17e Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:48:21 +05:30
Harsh Jha
1cd05b6fc1 update 2025-09-04 12:47:50 +05:30
Harsh Jha
e195a5bc74 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:39:23 +05:30
Harsh Jha
54827c46c8 fixed: quickstart_py.yaml 2025-09-04 12:37:21 +05:30
Harsh Jha
f28074285c Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:26:30 +05:30
Harsh Jha
2355ca5a96 added psql cmd 2025-09-04 12:26:00 +05:30
Harsh Jha
9160aad696 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:19:15 +05:30
Harsh Jha
46eb49245e updated 2025-09-04 12:17:34 +05:30
Harsh Jha
ff31de5eb2 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 12:00:13 +05:30
Harsh Jha
a3c4306897 updated quickstart_py.yaml 2025-09-04 11:59:34 +05:30
Harsh Jha
a295585367 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-04 11:54:18 +05:30
Harsh Jha
1571daa145 updated quickstart_py.yaml 2025-09-04 11:53:42 +05:30
Harsh Jha
371f03e0ed updated quickstart_py.yaml 2025-09-04 11:52:28 +05:30
Harsh Jha
39e3250a70 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-03 19:51:26 +05:30
Harsh Jha
b462583ccf chore: updated ci files 2025-09-03 19:50:51 +05:30
Harsh Jha
06042e6731 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-03 18:55:11 +05:30
Harsh Jha
94adbbd992 updated quickstart_py.yaml 2025-09-03 18:54:11 +05:30
Harsh Jha
7abec9e53e Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-03 18:39:28 +05:30
Harsh Jha
577c37f0df chore: updated quickstart_py.yaml 2025-09-03 18:38:46 +05:30
Harsh Jha
a07a8aa2d7 Merge branch 'sts-py-sh' of https://github.com/googleapis/genai-toolbox into sts-py-sh-eg 2025-09-03 18:16:54 +05:30
Harsh Jha
05662f1410 chore(ci): update quickstart_py.yaml 2025-09-03 18:14:34 +05:30
Harsh Jha
7c7b3b6305 renamed sample function to run_application 2025-09-03 18:10:33 +05:30
Harsh Jha
3bae22efcf chore(ci): update _GCP_PROJECT_NUMBER value in quickstart_py.yaml 2025-09-03 18:07:12 +05:30
Harsh Jha
9bde22bfce feat(ci): add ci files for py sample workflow 2025-09-03 17:50:25 +05:30
Harsh Jha
8c1467d8a9 Merge branch 'main' into sts-python-test 2025-09-03 10:53:23 +05:30
Harsh Jha
623b91af84 Merge branch 'main' into sts-python-test 2025-09-02 15:28:47 +05:30
Harsh Jha
e1cc144e72 Remove unnecessary newline in run_application function 2025-09-02 15:26:56 +05:30
Harsh Jha
1a0386b360 chore(python/quickstart): address multiple review comments in sample code 2025-09-02 15:25:48 +05:30
Harsh Jha
7dfdede4be test: Remove redundant script output check from tests 2025-09-02 15:17:50 +05:30
Harsh Jha
25e116584a fix(python/quickstart): pin exact versions in requirements.txt 2025-09-02 15:17:50 +05:30
Harsh Jha
e0d6c1f6d8 fix(python/quickstart): revert code sample in all python quickstart examples 2025-09-02 15:17:50 +05:30
Harsh Jha
7ffb0e2d82 feat: Implement language-agnostic snippet shortcode 2025-09-02 15:17:50 +05:30
Harsh Jha
0e23d73ea2 refactor(docs): Fix Table of Contents and optimize region include shortcode 2025-09-02 15:17:50 +05:30
Harsh Jha
79adf9d70b refactor(docs): Replace duplicated content with a shared shortcode 2025-09-02 15:17:50 +05:30
Harsh Jha
c97b89555e feat: Add shortcode for including file regions 2025-09-02 15:17:50 +05:30
Harsh Jha
018edb5d61 Merge branch 'main' into sts-python-test 2025-09-01 10:43:51 +05:30
Harsh Jha
a5beea5756 chore(docs/python): remove compile-time test from quickstart samples 2025-09-01 10:42:40 +05:30
Harsh Jha
1130354ac5 refactor: replace duplicate snippet code with regionInclude 2025-08-29 10:47:03 +05:30
Harsh Jha
28021d760e Merge branch 'main' of https://github.com/googleapis/genai-toolbox into sts-python-test 2025-08-29 10:39:56 +05:30
Harsh Jha
ff7f34bba6 feat: remove duplicate snippet shortcode 2025-08-29 10:39:44 +05:30
Harsh Jha
a93ab0c25a Merge branch 'main' into sts-python-test 2025-08-28 17:17:01 +05:30
Harsh Jha
921db2a546 refactor(test): update quickstart tests to only check for error-free execution 2025-08-28 16:08:34 +05:30
Harsh Jha
5b7cc83472 feat: add pytest-based test infrastructure for Python quickstart frameworks 2025-08-21 14:27:04 +05:30
Harsh Jha
e5922a69ec Merge branch 'sample-testing-strategy-python' of https://github.com/googleapis/genai-toolbox into sts-python-test 2025-08-20 19:46:37 +05:30
Harsh Jha
faa79cd3c1 Update quickstart.py 2025-08-20 18:21:11 +05:30
Harsh Jha
959941d4ae feat(adk): add Python quickstart testing infrastructure with requirements.txt, golden.txt, and quickstart_test.py 2025-08-20 18:14:31 +05:30
Harsh Jha
7dc77fec19 fix(quickstart): remove angle brackets from ToolboxSyncClient URL in Python ADK 2025-08-20 18:09:48 +05:30
Harsh Jha
061f38382e feat(python): add quickstart samples for adk, core, langchain, and llamaindex 2025-08-19 17:14:44 +05:30
Harsh Jha
64e64a0d51 Update quickstart docs and shortcodes for improved region includes 2025-08-19 16:34:13 +05:30
Anmol Shukla
5e0a1b03ab Merge branch 'main' into docs/add-region-shortcode 2025-08-19 14:28:14 +05:30
Anmol Shukla
b7cf0562ed Merge branch 'main' into docs/add-region-shortcode 2025-08-18 14:16:04 +05:30
Anmol Shukla
8309816f44 Merge branch 'main' into docs/add-region-shortcode 2025-08-14 15:12:34 +05:30
Harsh Jha
114a0c91d8 feat: Implement language-agnostic snippet shortcode 2025-08-05 14:39:53 +05:30
Harsh Jha
5f1e4b940c refactor(docs): Fix Table of Contents and optimize region include shortcode 2025-08-01 19:56:13 +05:30
Harsh Jha
e0b6d2d26b refactor(docs): Replace duplicated content with a shared shortcode 2025-07-31 18:52:46 +05:30
Harsh Jha
590bfaf4d3 feat: Add shortcode for including file regions 2025-07-31 11:31:19 +05:30
1529 changed files with 31897 additions and 183450 deletions

View File

@@ -18,9 +18,7 @@ steps:
script: |
#!/usr/bin/env bash
docker buildx create --name container-builder --driver docker-container --bootstrap --use
export TAGS="-t ${_DOCKER_URI}:$SHORT_SHA -t ${_DOCKER_URI}:$REF_NAME"
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse --short HEAD) $TAGS --push .
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
- id: "install-dependencies"
name: golang:1
@@ -33,56 +31,19 @@ steps:
script: |
go get -d ./...
- id: "install-zig"
name: golang:1
waitFor: ['-']
volumes:
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
tar -xf zig.tar.xz -C /zig-tools --strip-components=1
- id: "install-macos-sdk"
name: golang:1
waitFor: ['-']
volumes:
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
echo "Downloading macOS 14.5 SDK..."
curl -fL -o sdk.tar.xz https://github.com/alexey-lysiuk/macos-sdk/releases/download/14.5/MacOSX14.5.tar.xz
mkdir -p /macos-sdk/MacOSX14.5.sdk
echo "Unpacking macOS 14.5 SDK..."
tar -xf sdk.tar.xz -C /macos-sdk/MacOSX14.5.sdk --strip-components=1
- id: "build-linux-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -92,55 +53,19 @@ steps:
#!/usr/bin/env bash
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$REF_NAME/linux/amd64/toolbox
- id: "build-linux-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64
- id: "build-darwin-arm64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=arm64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -150,61 +75,19 @@ steps:
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$REF_NAME/darwin/arm64/toolbox
- id: "build-darwin-arm64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=arm64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
- id: "build-darwin-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=amd64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -214,54 +97,19 @@ steps:
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$REF_NAME/darwin/amd64/toolbox
- id: "build-darwin-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=amd64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
- id: "build-windows-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=windows'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -271,38 +119,15 @@ steps:
#!/usr/bin/env bash
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$REF_NAME/windows/amd64/toolbox.exe
- id: "build-windows-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=windows'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
options:
automapSubstitutions: true
dynamicSubstitutions: true
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
pool:
name: projects/$PROJECT_ID/locations/us-central1/workerPools/run-release # to increase resource for running releases
machineType: 'E2_HIGHCPU_32'
substitutions:
_REGION: us-central1
_AR_HOSTNAME: ${_REGION}-docker.pkg.dev
_AR_REPO_NAME: toolbox-dev
_BUCKET_NAME: genai-toolbox-dev
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox

File diff suppressed because it is too large Load Diff

View File

@@ -1,136 +0,0 @@
#!/bin/bash
# Lints integration Source pages (source.md) for a standardized structure.
# Exits non-zero when any page violates the rules below; exits 0 (gracefully)
# when the docs tree or any source.md files are absent.
set -e
python3 - << 'EOF'
"""
MCP TOOLBOX: SOURCE PAGE LINTER
===============================
This script enforces a standardized structure for integration Source pages
(source.md files). It ensures users can predictably find connection details
and configurations across all database integrations.
Note: The structural _index.md folder wrappers are intentionally ignored
by this script as they should only contain YAML frontmatter.

MAINTENANCE GUIDE:
------------------
1. TO ADD A NEW HEADING:
   Add the exact heading text to the 'ALLOWED_ORDER' list in the desired
   sequence.
2. TO MAKE A HEADING MANDATORY/OPTIONAL:
   Add or remove the heading text in the 'REQUIRED' set.
3. TO IGNORE NEW CONTENT TYPES:
   Update the regex in the 'clean_body' variable to strip out
   Markdown before linting.
4. SCOPE:
   This script only targets docs/en/integrations/**/source.md.
"""
import os
import re
import sys
from pathlib import Path

# --- CONFIGURATION ---
# Canonical H2 ordering for every source.md page.
ALLOWED_ORDER = [
    "About",
    "Available Tools",
    "Requirements",
    "Example",
    "Reference",
    "Advanced Usage",
    "Troubleshooting",
    "Additional Resources"
]
# Headings that must be present on every page.
REQUIRED = {"About", "Example", "Reference"}
# Regex to catch any variation of the list-tools shortcode
SHORTCODE_PATTERN = r"\{\{<\s*list-tools.*?>\}\}"
# ---------------------

integration_dir = Path("./docs/en/integrations")
if not integration_dir.exists():
    print("Info: Directory './docs/en/integrations' not found. Skipping linting.")
    sys.exit(0)

has_errors = False
source_pages_found = 0

# ONLY scan files specifically named "source.md"
for filepath in integration_dir.rglob("source.md"):
    source_pages_found += 1
    file_errors = False
    # Only lint top-level integration pages: docs/en/integrations/<name>/source.md.
    # (Deeper matches are counted above but intentionally not linted — presumably
    # nested copies; confirm if nesting ever becomes meaningful.)
    if filepath.parent.parent != integration_dir:
        continue
    with open(filepath, "r", encoding="utf-8") as f:
        content = f.read()
    # Split the YAML frontmatter (between the leading '---' fences) from the body.
    match = re.match(r'^\s*---\s*\n(.*?)\n---\s*(.*)', content, re.DOTALL)
    if match:
        frontmatter, body = match.group(1), match.group(2)
    else:
        print(f"[{filepath}] Error: Missing or invalid YAML frontmatter.")
        has_errors = True
        continue
    # 1. Check for linkTitle: "Source" in frontmatter
    link_title_match = re.search(r"^linkTitle:\s*[\"']?(.*?)[\"']?\s*$", frontmatter, re.MULTILINE)
    if not link_title_match or link_title_match.group(1).strip() != "Source":
        print(f"[{filepath}] Error: Frontmatter must contain exactly linkTitle: \"Source\".")
        file_errors = True
    # 2. Check for weight: 1 in frontmatter
    weight_match = re.search(r"^weight:\s*[\"']?(\d+)[\"']?\s*$", frontmatter, re.MULTILINE)
    if not weight_match or weight_match.group(1).strip() != "1":
        print(f"[{filepath}] Error: Frontmatter must contain exactly weight: 1.")
        file_errors = True
    # 3. Check Shortcode Placement & Available Tools Section (Only if present)
    tools_section_match = re.search(r"^##\s+Available Tools\s*(.*?)(?=^##\s|\Z)", body, re.MULTILINE | re.DOTALL)
    if tools_section_match:
        if not re.search(SHORTCODE_PATTERN, tools_section_match.group(1)):
            print(f"[{filepath}] Error: The list-tools shortcode must be placed under the '## Available Tools' heading.")
            file_errors = True
    # Strip code blocks from body to avoid linting example markdown headings
    clean_body = re.sub(r"```.*?```", "", body, flags=re.DOTALL)
    if re.search(r"^#\s+\w+", clean_body, re.MULTILINE):
        print(f"[{filepath}] Error: H1 (#) headings are forbidden in the body.")
        file_errors = True
    h2s = [h.strip() for h in re.findall(r"^##\s+(.*)", clean_body, re.MULTILINE)]
    # Missing Required Headings
    missing = REQUIRED - set(h2s)
    if missing:
        print(f"[{filepath}] Error: Missing required H2 headings: {missing}")
        file_errors = True
    # Unauthorized headings (anything not in the canonical list)
    if unauthorized := (set(h2s) - set(ALLOWED_ORDER)):
        print(f"[{filepath}] Error: Unauthorized H2s found: {unauthorized}")
        file_errors = True
    # 5. Order Check: headings present must appear in ALLOWED_ORDER sequence.
    if [h for h in h2s if h in ALLOWED_ORDER] != [h for h in ALLOWED_ORDER if h in h2s]:
        print(f"[{filepath}] Error: Headings out of order. Reference: {ALLOWED_ORDER}")
        file_errors = True
    if file_errors:
        has_errors = True

# Handle final output based on what was found
if source_pages_found == 0:
    print("Info: No 'source.md' files found in integrations. Passing gracefully.")
    sys.exit(0)
elif has_errors:
    print(f"\nLinting failed. Please fix the structure errors in the {source_pages_found} 'source.md' file(s) above.")
    sys.exit(1)
else:
    print(f"Success: {source_pages_found} 'source.md' file(s) passed structure validation.")
EOF

View File

@@ -1,155 +0,0 @@
#!/bin/bash
# Lints integration Tool pages (docs/en/integrations/**/tools/*.md) for a
# standardized structure. Exits non-zero on any violation; exits 0 gracefully
# when the docs tree or tool pages are absent.
set -e
python3 - << 'EOF'
"""
MCP TOOLBOX: TOOL PAGE LINTER
=============================
This script enforces a standardized structure for individual Tool pages
and their parent directory wrappers. It ensures LLM agents can parse
tool capabilities and parameter definitions reliably.

MAINTENANCE GUIDE:
------------------
1. TO ADD A NEW HEADING:
   Add the exact heading text to the 'ALLOWED_ORDER' list in the desired
   sequence.
2. TO MAKE A HEADING MANDATORY/OPTIONAL:
   Add or remove the heading text in the 'REQUIRED' set.
3. TO UPDATE SHORTCODE LOGIC:
   If the shortcode name changes, update the 'SHORTCODE_PATTERN' variable.
4. SCOPE & BEHAVIOR:
   This script targets all .md files in docs/en/integrations/**/tools/.
   - For `_index.md` files: It only validates the frontmatter (requiring
     `title: "Tools"` and `weight: 2`) and ignores the body.
   - For regular tool files: It validates H1/H2 hierarchy, checks for
     required headings ("About", "Example"), and enforces that the
     `{{< compatible-sources >}}` shortcode is paired with the
     "## Compatible Sources" heading.
"""
import os
import re
import sys
from pathlib import Path

# --- CONFIGURATION ---
# Canonical H2 ordering for every tool page.
ALLOWED_ORDER = [
    "About",
    "Compatible Sources",
    "Requirements",
    "Parameters",
    "Example",
    "Output Format",
    "Reference",
    "Advanced Usage",
    "Troubleshooting",
    "Additional Resources"
]
# Headings that must be present on every tool page.
REQUIRED = {"About", "Example"}
SHORTCODE_PATTERN = r"\{\{<\s*compatible-sources.*?>\}\}"
# ---------------------

integration_dir = Path("./docs/en/integrations")
if not integration_dir.exists():
    print("Info: Directory './docs/en/integrations' not found. Skipping linting.")
    sys.exit(0)

has_errors = False
tools_pages_found = 0

# Specifically target the tools directories
for filepath in integration_dir.rglob("tools/*.md"):
    tools_pages_found += 1
    with open(filepath, "r", encoding="utf-8") as f:
        content = f.read()
    # Separate YAML frontmatter from the markdown body
    match = re.match(r'^\s*---\s*\n(.*?)\n---\s*(.*)', content, re.DOTALL)
    if match:
        frontmatter = match.group(1)
        body = match.group(2)
    else:
        print(f"[{filepath}] Error: Missing or invalid YAML frontmatter.")
        has_errors = True
        continue
    file_errors = False
    # --- SPECIAL VALIDATION FOR tools/_index.md ---
    if filepath.name == "_index.md":
        title_match = re.search(r"^title:\s*[\"']?(.*?)[\"']?\s*$", frontmatter, re.MULTILINE)
        if not title_match or title_match.group(1).strip() != "Tools":
            print(f"[{filepath}] Error: tools/_index.md must have exactly title: \"Tools\"")
            file_errors = True
        weight_match = re.search(r"^weight:\s*(\d+)\s*$", frontmatter, re.MULTILINE)
        if not weight_match or weight_match.group(1).strip() != "2":
            print(f"[{filepath}] Error: tools/_index.md must have exactly weight: 2")
            file_errors = True
        if file_errors:
            has_errors = True
        continue  # Skip the rest of the body linting for this structural file
    # --- VALIDATION FOR REGULAR TOOL PAGES ---
    # If the file has no markdown content (metadata placeholder only), skip it entirely
    if not body.strip():
        continue
    # 1. Check Shortcode Placement
    sources_section_match = re.search(r"^##\s+Compatible Sources\s*(.*?)(?=^##\s|\Z)", body, re.MULTILINE | re.DOTALL)
    if sources_section_match:
        if not re.search(SHORTCODE_PATTERN, sources_section_match.group(1)):
            print(f"[{filepath}] Error: The compatible-sources shortcode must be placed under '## Compatible Sources'.")
            file_errors = True
    elif re.search(SHORTCODE_PATTERN, body):
        print(f"[{filepath}] Error: Shortcode found, but '## Compatible Sources' heading is missing.")
        file_errors = True
    # 2. Strip code blocks from body to avoid linting example markdown headings
    clean_body = re.sub(r"```.*?```", "", body, flags=re.DOTALL)
    # 3. Check H1 Headings
    if re.search(r"^#\s+\w+", clean_body, re.MULTILINE):
        print(f"[{filepath}] Error: H1 headings (#) are forbidden in the body.")
        file_errors = True
    # 4. Check H2 Headings
    h2s = re.findall(r"^##\s+(.*)", clean_body, re.MULTILINE)
    h2s = [h2.strip() for h2 in h2s]
    # Missing Required
    if missing := (REQUIRED - set(h2s)):
        print(f"[{filepath}] Error: Missing required H2 headings: {missing}")
        file_errors = True
    # Unauthorized Headings
    if unauthorized := (set(h2s) - set(ALLOWED_ORDER)):
        print(f"[{filepath}] Error: Unauthorized H2 headings found: {unauthorized}")
        file_errors = True
    # Strict Ordering: present headings must follow ALLOWED_ORDER sequence.
    filtered_h2s = [h for h in h2s if h in ALLOWED_ORDER]
    expected_order = [h for h in ALLOWED_ORDER if h in h2s]
    if filtered_h2s != expected_order:
        print(f"[{filepath}] Error: Headings are out of order.")
        print(f"  Expected: {expected_order}")
        print(f"  Found: {filtered_h2s}")
        file_errors = True
    if file_errors:
        has_errors = True

if tools_pages_found == 0:
    print("Info: No tool directories found. Passing gracefully.")
    sys.exit(0)
elif has_errors:
    print("Linting failed for Tool pages. Please fix the structure errors above.")
    sys.exit(1)
else:
    print(f"Success: All {tools_pages_found} Tool page(s) passed structure validation.")
EOF

107
.ci/quickstart_py.sh Normal file
View File

@@ -0,0 +1,107 @@
#!/bin/bash
# CI driver for the Python quickstart samples: starts a Cloud SQL Auth Proxy
# and a Toolbox server, then for each orchestration-framework directory under
# the quickstart docs, seeds a hotels table, installs the sample's
# requirements into a venv, and runs pytest.
#
# Required environment variables (provided by Cloud Build):
#   CLOUD_SQL_INSTANCE, DB_USER, DB_PASSWORD, DATABASE_NAME,
#   TOOLS_YAML_CONTENT, GOOGLE_API_KEY, VERSION (toolbox release to download).
set -e
set -u
TABLE_NAME="hotels"
QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python"
TOOLBOX_SETUP_DIR="/workspace/toolbox_setup"
# Install tooling needed below: psql, venv support, and download clients.
apt-get update && apt-get install -y postgresql-client python3-venv curl wget
# Nothing to test if the quickstart directory is absent.
if [ ! -d "$QUICKSTART_PYTHON_DIR" ]; then
exit 1
fi
# Download and start the Cloud SQL Auth Proxy in the background.
wget https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64 -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
# psql/libpq connection settings; the proxy listens on localhost:5432.
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
# Materialize the tools.yaml from the secret and fetch the toolbox binary.
mkdir -p "${TOOLBOX_SETUP_DIR}"
echo "${TOOLS_YAML_CONTENT}" > "${TOOLBOX_SETUP_DIR}/tools.yaml"
if [ ! -f "${TOOLBOX_SETUP_DIR}/tools.yaml" ]; then echo "Failed to create tools.yaml"; exit 1; fi
curl -L "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -o "${TOOLBOX_SETUP_DIR}/toolbox"
chmod +x "${TOOLBOX_SETUP_DIR}/toolbox"
if [ ! -f "${TOOLBOX_SETUP_DIR}/toolbox" ]; then echo "Failed to download toolbox"; exit 1; fi
echo "--- Starting Toolbox Server ---"
cd "${TOOLBOX_SETUP_DIR}"
./toolbox --tools-file ./tools.yaml &
TOOLBOX_PID=$!
cd "/workspace"
# Fixed grace period for the server to come up; there is no port health
# check here. NOTE(review): the EXIT trap is only installed after this
# point, so a failure in the download steps above leaves the proxy running.
sleep 5
cleanup_all() {
kill $TOOLBOX_PID || true
kill $PROXY_PID || true
}
trap cleanup_all EXIT
# One subshell per framework directory; each gets a fresh table and venv,
# and its own EXIT trap drops the table / venv even if the tests fail.
for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do
if [ ! -d "$ORCH_DIR" ]; then
continue
fi
(
set -e
ORCH_NAME=$(basename "$ORCH_DIR")
cleanup_orch() {
psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME" -c "DROP TABLE IF EXISTS $TABLE_NAME;"
if [ -d ".venv" ]; then
rm -rf ".venv"
fi
}
trap cleanup_orch EXIT
cd "$ORCH_DIR"
# Seed the hotels table used by the quickstart agent.
psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME" <<EOF
CREATE TABLE $TABLE_NAME (
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
INSERT INTO $TABLE_NAME (id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
EOF
# Per-sample virtualenv so frameworks with conflicting deps don't collide.
VENV_DIR=".venv"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"
if [ -f "requirements.txt" ]; then
pip install -r requirements.txt
else
echo "Warning: requirements.txt not found. Skipping."
fi
echo "Running tests for $ORCH_NAME..."
# assumes pytest is provided by requirements.txt — TODO confirm
pytest
)
done

View File

@@ -12,41 +12,46 @@
# See the License for the specific language governing permissions and
# limitations under the License.
substitutions:
_GCP_PROJECT: "your-project-id"
_CLOUD_SQL_INSTANCE: "project-id:region:instance-name"
_DATABASE_NAME: "db-name"
_DB_USER: "db-user"
_TOOLS_YAML_SECRET: "tools yaml secret"
_API_KEY_SECRET: "api key secret"
_DB_PASS_SECRET: "db pass secret"
_GCP_PROJECT_NUMBER: "your-project-number"
steps:
- name: 'node:22'
id: 'js-quickstart-test'
- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
id: 'run-psql-commands'
entrypoint: 'bash'
args:
# The '-c' flag tells bash to execute the following string as a command.
# The 'set -ex' enables debug output and exits on error for easier troubleshooting.
- -c
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
chmod +x .ci/quickstart_py.sh
.ci/quickstart_py.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/documentation/getting-started/quickstart/js'
- 'TARGET_LANG=js'
- 'TABLE_NAME=hotels_js'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.js'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:
secretManager:
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/9
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/latest
env: 'TOOLS_YAML_CONTENT'
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: 'GOOGLE_API_KEY'
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: 'DB_PASSWORD'
timeout: 1000s
timeout: 1800s
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,57 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Cloud Build config: runs the Go pre/post-processing docs samples through the
# shared .ci/sample_tests/run_tests.sh driver inside a Go toolchain image.
steps:
- name: "${_IMAGE}"
id: "go-pre-post-processing-test"
entrypoint: "bash"
args:
- -c
- |
set -ex
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
# Configuration consumed by run_tests.sh (target dir, language, table, SQL seed).
env:
- "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
- "GCP_PROJECT=${_GCP_PROJECT}"
- "DATABASE_NAME=${_DATABASE_NAME}"
- "DB_USER=${_DB_USER}"
- "TARGET_ROOT=${_TARGET_ROOT}"
- "TARGET_LANG=${_TARGET_LANG}"
- "TABLE_NAME=${_TABLE_NAME}"
- "SQL_FILE=${_SQL_FILE}"
- "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]
availableSecrets:
secretManager:
# NOTE(review): tools.yaml secret is pinned to version 8 while the others use
# "latest" — confirm the pin is intentional.
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/8
env: "TOOLS_YAML_CONTENT"
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: "GOOGLE_API_KEY"
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: "DB_PASSWORD"
timeout: 1200s
substitutions:
_TARGET_LANG: "go"
_IMAGE: "golang:1.25.7"
_TARGET_ROOT: "docs/en/documentation/configuration/pre-post-processing/go"
_TABLE_NAME: "hotels_go_pre_post_processing"
_SQL_FILE: ".ci/sample_tests/setup_hotels.sql"
_AGENT_FILE_PATTERN: "agent.go"
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,57 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Cloud Build config: runs the JS pre/post-processing docs samples through the
# shared .ci/sample_tests/run_tests.sh driver inside a Node image.
steps:
- name: "${_IMAGE}"
id: "js-pre-post-processing-test"
entrypoint: "bash"
args:
- -c
- |
set -ex
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
# Configuration consumed by run_tests.sh (target dir, language, table, SQL seed).
env:
- "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
- "GCP_PROJECT=${_GCP_PROJECT}"
- "DATABASE_NAME=${_DATABASE_NAME}"
- "DB_USER=${_DB_USER}"
- "TARGET_ROOT=${_TARGET_ROOT}"
- "TARGET_LANG=${_TARGET_LANG}"
- "TABLE_NAME=${_TABLE_NAME}"
- "SQL_FILE=${_SQL_FILE}"
- "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]
availableSecrets:
secretManager:
# NOTE(review): tools.yaml secret is pinned to version 8 while the others use
# "latest" — confirm the pin is intentional.
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/8
env: "TOOLS_YAML_CONTENT"
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: "GOOGLE_API_KEY"
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: "DB_PASSWORD"
timeout: 1200s
substitutions:
_TARGET_LANG: "js"
_IMAGE: "node:22"
_TARGET_ROOT: "docs/en/documentation/configuration/pre-post-processing/js"
_TABLE_NAME: "hotels_js_pre_post_processing"
_SQL_FILE: ".ci/sample_tests/setup_hotels.sql"
_AGENT_FILE_PATTERN: "agent.js"
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,57 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Cloud Build config: runs the Python pre/post-processing docs samples through
# the shared .ci/sample_tests/run_tests.sh driver inside a cloud-sdk image.
steps:
- name: "${_IMAGE}"
id: "py-pre-post-processing-test"
entrypoint: "bash"
args:
- -c
- |
set -ex
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
# Configuration consumed by run_tests.sh (target dir, language, table, SQL seed).
env:
- "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
- "GCP_PROJECT=${_GCP_PROJECT}"
- "DATABASE_NAME=${_DATABASE_NAME}"
- "DB_USER=${_DB_USER}"
- "TARGET_ROOT=${_TARGET_ROOT}"
- "TARGET_LANG=${_TARGET_LANG}"
- "TABLE_NAME=${_TABLE_NAME}"
- "SQL_FILE=${_SQL_FILE}"
- "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]
availableSecrets:
secretManager:
# NOTE(review): tools.yaml secret is pinned to version 8 while the others use
# "latest" — confirm the pin is intentional.
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/8
env: "TOOLS_YAML_CONTENT"
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: "GOOGLE_API_KEY"
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: "DB_PASSWORD"
timeout: 1200s
substitutions:
_TARGET_LANG: "python"
_IMAGE: "gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0"
_TARGET_ROOT: "docs/en/documentation/configuration/pre-post-processing/python"
_TABLE_NAME: "hotels_py_pre_post_processing"
_SQL_FILE: ".ci/sample_tests/setup_hotels.sql"
_AGENT_FILE_PATTERN: "agent.py"
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,52 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Cloud Build config: runs the Go quickstart docs samples through the shared
# .ci/sample_tests/run_tests.sh driver. VERSION (toolbox release) is read from
# cmd/version.txt before invoking the driver.
steps:
- name: 'golang:1.25.7'
id: 'go-quickstart-test'
entrypoint: 'bash'
args:
# The '-c' flag tells bash to execute the following string as a command.
# The 'set -ex' enables debug output and exits on error for easier troubleshooting.
- -c
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
# Configuration consumed by run_tests.sh (target dir, language, table, SQL seed).
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/documentation/getting-started/quickstart/go'
- 'TARGET_LANG=go'
- 'TABLE_NAME=hotels_go'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.go'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:
secretManager:
# NOTE(review): tools.yaml secret is pinned to version 10 while the others use
# "latest" — confirm the pin is intentional.
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/10
env: 'TOOLS_YAML_CONTENT'
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: 'GOOGLE_API_KEY'
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: 'DB_PASSWORD'
timeout: 1000s
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,52 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Cloud Build config: runs the Python quickstart docs samples through the
# shared .ci/sample_tests/run_tests.sh driver. VERSION (toolbox release) is
# read from cmd/version.txt before invoking the driver.
steps:
- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0'
id: 'python-quickstart-test'
entrypoint: 'bash'
args:
# The '-c' flag tells bash to execute the following string as a command.
# The 'set -ex' enables debug output and exits on error for easier troubleshooting.
- -c
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/sample_tests/run_tests.sh
.ci/sample_tests/run_tests.sh
# Configuration consumed by run_tests.sh (target dir, language, table, SQL seed).
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
- 'TARGET_ROOT=docs/en/documentation/getting-started/quickstart/python'
- 'TARGET_LANG=python'
- 'TABLE_NAME=hotels_python'
- 'SQL_FILE=.ci/sample_tests/setup_hotels.sql'
- 'AGENT_FILE_PATTERN=quickstart.py'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:
secretManager:
# NOTE(review): tools.yaml secret is pinned to version 8 while the others use
# "latest" — confirm the pin is intentional.
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/8
env: 'TOOLS_YAML_CONTENT'
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: 'GOOGLE_API_KEY'
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: 'DB_PASSWORD'
timeout: 1000s
options:
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,202 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# --- Configuration (from Environment Variables) ---
# TARGET_ROOT: The directory to search for tests (e.g., docs/en/getting-started/quickstart/js)
# TARGET_LANG: python, js, go
# TABLE_NAME: Database table name to use
# SQL_FILE: Path to the SQL setup file
# AGENT_FILE_PATTERN: Filename to look for (e.g., quickstart.js or agent.py)
# Toolbox release to download; read from the repo's version file.
VERSION=$(cat ./cmd/version.txt)
# Process IDs & Logs
# Empty PID strings mean "not started"; the cleanup() trap checks for this.
PROXY_PID=""
TOOLBOX_PID=""
PROXY_LOG="cloud_sql_proxy.log"
TOOLBOX_LOG="toolbox_server.log"
# Install the OS packages the driver needs (psql, wget, envsubst, nc).
# Python targets additionally get the venv module.
install_system_packages() {
echo "Installing system packages..."
local pkgs="postgresql-client wget gettext-base netcat-openbsd"
if [[ "$TARGET_LANG" == "python" ]]; then
pkgs="$pkgs python3-venv"
fi
# Intentionally unquoted so the list word-splits into individual packages.
apt-get update && apt-get install -y $pkgs
}
# Download the Cloud SQL Auth Proxy, start it in the background for
# $CLOUD_SQL_INSTANCE, and wait (up to 30s) until port 5432 accepts
# connections. Sets the global PROXY_PID; exits the script on failure.
start_cloud_sql_proxy() {
echo "Starting Cloud SQL Proxy..."
wget -q "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
# Logs go to $PROXY_LOG so they can be dumped if startup fails.
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" > "$PROXY_LOG" 2>&1 &
PROXY_PID=$!
# Health Check
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "ERROR: Cloud SQL Proxy failed to start. Logs:"
cat "$PROXY_LOG"
exit 1
}
# Write the tools.yaml from the TOOLS_YAML_CONTENT secret, download the
# toolbox release for $VERSION, launch it in the background, and wait up to
# 15s for port 5000. Sets the global TOOLBOX_PID; exits the script on failure.
setup_toolbox() {
echo "Setting up Toolbox server..."
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
wget -q "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" > "$TOOLBOX_LOG" 2>&1 &
TOOLBOX_PID=$!
# Poll the port once per second until it is reachable or we give up.
local tries=0
while [ "$tries" -lt 15 ]; do
if nc -z 127.0.0.1 5000; then
echo "Toolbox server is up and running."
return
fi
sleep 1
tries=$((tries + 1))
done
echo "ERROR: Toolbox server failed to start. Logs:"
cat "$TOOLBOX_LOG"
exit 1
}
# Render $SQL_FILE with envsubst (substituting $TABLE_NAME) and pipe the
# result into psql against the proxied database.
setup_db_table() {
export TABLE_NAME
echo "Setting up database table $TABLE_NAME using $SQL_FILE..."
envsubst < "$SQL_FILE" | psql -h 127.0.0.1 -p 5432 -U "$DB_USER" -d "$DATABASE_NAME"
}
# Run one Python sample: create a venv inside the sample dir, install its
# requirements plus pytest, then either run a sibling *test.py from the
# parent directory (with ORCH_NAME exported for it) or run the agent file
# directly. Executes in a subshell so cd/venv activation don't leak.
run_python_test() {
local dir=$1
local name=$(basename "$dir")
echo "--- Running Python Test: $name ---"
(
cd "$dir"
python3 -m venv .venv
source .venv/bin/activate
pip install -q -r requirements.txt pytest
# Tests live one level up, next to the per-framework sample dirs.
cd ..
local test_file=$(find . -maxdepth 1 -name "*test.py" | head -n 1)
if [ -n "$test_file" ]; then
echo "Found native test: $test_file. Running pytest..."
export ORCH_NAME="$name"
# PYTHONPATH="../" presumably lets the test import shared helpers from
# the directory above — TODO confirm against the docs layout.
export PYTHONPATH="../"
pytest "$test_file"
else
echo "No native test found. running agent directly..."
export PYTHONPATH="../"
python3 "${name}/${AGENT_FILE_PATTERN}"
fi
# Remove the venv so it isn't picked up by later scans.
rm -rf "${name}/.venv"
)
}
# Run one JS sample: install its npm dependencies, then either run a sibling
# *test.js from the parent directory via `node --test` (with ORCH_NAME
# exported) or run the agent file directly. Executes in a subshell so the
# working-directory changes don't leak.
run_js_test() {
local sample_path=$1
local sample_name
sample_name=$(basename "$sample_path")
echo "--- Running JS Test: $sample_name ---"
(
cd "$sample_path"
# Prefer a reproducible install when a lockfile exists.
if [ -f "package-lock.json" ]; then
npm ci -q
else
npm install -q
fi
cd ..
# Looking for a JS test file in the parent directory
local found_test
found_test=$(find . -maxdepth 1 -name "*test.js" | head -n 1)
if [ -z "$found_test" ]; then
echo "No native test found. running agent directly..."
node "${sample_name}/${AGENT_FILE_PATTERN}"
else
echo "Found native test: $found_test. Running node --test..."
export ORCH_NAME="$sample_name"
node --test "$found_test"
fi
rm -rf "${sample_name}/node_modules"
)
}
# Run one Go sample: tidy its module, then either run a sibling *test.go from
# the parent directory via `go test ./...` (with ORCH_NAME exported) or
# `go run` the sample directly. The "openAI" framework is skipped.
# Executes in a subshell so directory changes don't leak.
run_go_test() {
local dir=$1
local name=$(basename "$dir")
if [ "$name" == "openAI" ]; then
echo -e "\nSkipping framework '${name}': Temporarily excluded."
return
fi
echo "--- Running Go Test: $name ---"
(
cd "$dir"
if [ -f "go.mod" ]; then
go mod tidy
fi
cd ..
local test_file=$(find . -maxdepth 1 -name "*test.go" | head -n 1)
if [ -n "$test_file" ]; then
echo "Found native test: $test_file. Running go test..."
export ORCH_NAME="$name"
# NOTE(review): runs all packages under the parent dir, not just
# $test_file — confirm that is intended.
go test -v ./...
else
echo "No native test found. running agent directly..."
cd "$name"
go run "."
fi
)
}
# Stop the background toolbox and proxy processes if they were started.
# Kill failures are ignored so the EXIT trap never masks the real exit code.
cleanup() {
echo "Cleaning up background processes..."
if [ -n "$TOOLBOX_PID" ]; then
kill "$TOOLBOX_PID" || true
fi
if [ -n "$PROXY_PID" ]; then
kill "$PROXY_PID" || true
fi
}
# Ensure background processes are stopped however the script exits.
trap cleanup EXIT
# --- Execution ---
install_system_packages
start_cloud_sql_proxy
# libpq connection settings for every psql invocation below.
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
setup_db_table
echo "Scanning $TARGET_ROOT for tests with pattern $AGENT_FILE_PATTERN..."
# NOTE: the while loop runs in a pipeline subshell; with `set -e` a failing
# sample aborts the whole scan.
find "$TARGET_ROOT" -name "$AGENT_FILE_PATTERN" | while read -r agent_file; do
sample_dir=$(dirname "$agent_file")
if [[ "$TARGET_LANG" == "python" ]]; then
run_python_test "$sample_dir"
elif [[ "$TARGET_LANG" == "js" ]]; then
run_js_test "$sample_dir"
elif [[ "$TARGET_LANG" == "go" ]]; then
run_go_test "$sample_dir"
fi
done

View File

@@ -1,28 +0,0 @@
-- Copyright 2025 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- Seed data for the sample-test hotels table. $TABLE_NAME is substituted by
-- envsubst before this file is piped into psql (see run_tests.sh).
TRUNCATE TABLE $TABLE_NAME;
-- NOTE(review): several rows have checkout_date before checkin_date —
-- presumably intentional fixture noise; confirm the samples don't rely on
-- valid date ordering.
INSERT INTO $TABLE_NAME (id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');

View File

@@ -1,75 +0,0 @@
#!/bin/bash
# .ci/test_prompts_with_coverage.sh
#
# This script runs a specific prompt integration test, calculates its
# code coverage, and checks if it meets a minimum threshold.
#
# It is called with one argument: the type of the prompt.
# Example usage: .ci/test_prompts_with_coverage.sh "custom"
# Exit immediately if a command fails.
set -e
# --- 1. Define Variables ---
# The first argument is the prompt type (e.g., "custom").
PROMPT_TYPE=$1
COVERAGE_THRESHOLD=80 # Minimum coverage percentage required.
if [ -z "$PROMPT_TYPE" ]; then
echo "Error: No prompt type provided. Please call this script with an argument."
echo "Usage: .ci/test_prompts_with_coverage.sh <prompt_type>"
exit 1
fi
# Construct names based on the prompt type. TEST_BINARY already carries the
# "./" prefix, so it is executed as-is below (the previous ./"${TEST_BINARY}"
# form produced a redundant "././..." path).
TEST_BINARY="./prompt.${PROMPT_TYPE}.test"
# Capitalize the first letter for display (e.g. "custom" -> "Custom Prompts").
# The herestring expansion is quoted to avoid globbing/splitting surprises.
TEST_NAME="$(tr '[:lower:]' '[:upper:]' <<< "${PROMPT_TYPE:0:1}")${PROMPT_TYPE:1} Prompts"
COVERAGE_FILE="coverage.prompts-${PROMPT_TYPE}.out"
# --- 2. Run Integration Tests ---
echo "--- Running integration tests for ${TEST_NAME} ---"
# Safety check for the binary's existence.
if [ ! -f "$TEST_BINARY" ]; then
echo "Error: Test binary not found at ${TEST_BINARY}. Aborting."
exit 1
fi
# Execute the test binary and generate the coverage file.
# If the tests fail, the 'set -e' command will cause the script to exit here.
if ! "${TEST_BINARY}" -test.v -test.coverprofile="${COVERAGE_FILE}"; then
echo "Error: Tests for ${TEST_NAME} failed. Exiting."
exit 1
fi
echo "--- Tests for ${TEST_NAME} passed successfully ---"
# --- 3. Calculate and Check Coverage ---
echo "Calculating coverage for ${TEST_NAME}..."
# Calculate the total coverage percentage from the generated file.
# The '2>/dev/null' suppresses warnings if the coverage file is empty.
total_coverage=$(go tool cover -func="${COVERAGE_FILE}" 2>/dev/null | grep "total:" | awk '{print $3}')
if [ -z "$total_coverage" ]; then
echo "Warning: Could not calculate coverage for ${TEST_NAME}. The coverage report might be empty."
total_coverage="0%"
fi
echo "${TEST_NAME} total coverage: $total_coverage"
# Remove the '%' sign for numerical comparison.
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
# Check if the coverage is below the defined threshold.
# awk handles the floating-point comparison that [ ] cannot.
if awk -v coverage="$coverage_numeric" -v threshold="$COVERAGE_THRESHOLD" 'BEGIN {exit !(coverage < threshold)}'; then
echo "Coverage failure: ${TEST_NAME} total coverage (${total_coverage}) is below the ${COVERAGE_THRESHOLD}% threshold."
exit 1
else
echo "Coverage for ${TEST_NAME} is sufficient."
fi

View File

@@ -35,7 +35,7 @@ for tool_name in "${TOOL_PACKAGE_NAMES[@]}"; do
done
# Run integration test
if ! ./"${TEST_BINARY}" -test.v ${EXTRA_TEST_ARGS:-} -test.coverprofile="${COVERAGE_FILE}"; then
if ! ./"${TEST_BINARY}" -test.v -test.coverprofile="${COVERAGE_FILE}"; then
echo "Error: Tests for ${DISPLAY_NAME} failed. Exiting."
exit 1
fi

View File

@@ -17,31 +17,19 @@ steps:
waitFor: ['-']
script: |
#!/usr/bin/env bash
set -e
export VERSION=$(cat ./cmd/version.txt)
docker buildx create --name container-builder --driver docker-container --bootstrap --use
export TAGS="-t ${_DOCKER_URI}:$VERSION"
if [[ "$_PUSH_LATEST" == "true" ]]; then
if [[ $_PUSH_LATEST == 'true' ]]; then
export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
fi
# Build and push
docker buildx build \
--platform linux/amd64,linux/arm64 \
--build-arg BUILD_TYPE=container.release \
--build-arg COMMIT_SHA=$(git rev-parse --short HEAD) \
$TAGS \
--push .
# Pull the image to ensure it's available for provenance generation.
docker pull ${_DOCKER_URI}:${VERSION}
docker tag ${_DOCKER_URI}:${VERSION} ${_DOCKER_URI}:latest
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
- id: "install-dependencies"
name: golang:1
waitFor: ['-']
env:
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'
@@ -49,56 +37,20 @@ steps:
script: |
go get -d ./...
- id: "install-zig"
name: golang:1
waitFor: ['-']
volumes:
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
tar -xf zig.tar.xz -C /zig-tools --strip-components=1
- id: "install-macos-sdk"
name: golang:1
waitFor: ['-']
volumes:
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
echo "Downloading macOS 14.5 SDK..."
curl -fL -o sdk.tar.xz https://github.com/alexey-lysiuk/macos-sdk/releases/download/14.5/MacOSX14.5.tar.xz
mkdir -p /macos-sdk/MacOSX14.5.sdk
echo "Unpacking macOS 14.5 SDK..."
tar -xf sdk.tar.xz -C /macos-sdk/MacOSX14.5.sdk --strip-components=1
- id: "build-linux-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
env:
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -109,63 +61,20 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox
- id: "build-linux-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64
- id: "store-linux-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-linux-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.linux.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/linux/amd64/toolbox
- id: "build-darwin-arm64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=arm64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -174,73 +83,22 @@ steps:
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.arm64 \
gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
- id: "build-darwin-arm64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=arm64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
- id: "store-darwin-arm64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-arm64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.darwin.arm64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/arm64/toolbox
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
- id: "build-darwin-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=amd64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -251,64 +109,20 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
- id: "build-darwin-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=amd64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
- id: "store-darwin-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.darwin.amd64 \
gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/amd64/toolbox
- id: "build-windows-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
- "install-zig"
env:
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=windows'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -319,49 +133,11 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
- id: "build-windows-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=windows'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
- id: "store-windows-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-windows-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.windows.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/windows/amd64/toolbox.exe
images:
- "${_DOCKER_URI}:latest"
options:
requestedVerifyOption: VERIFIED # This ensures provenance is generated
automapSubstitutions: true
dynamicSubstitutions: true
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
pool:
name: projects/$PROJECT_ID/locations/us-central1/workerPools/run-release # to increase resource for running releases
machineType: 'E2_HIGHCPU_32'
substitutions:
_REGION: us-central1
@@ -369,4 +145,4 @@ substitutions:
_AR_REPO_NAME: toolbox
_BUCKET_NAME: genai-toolbox
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
_PUSH_LATEST: "false" # Substituted in trigger
_PUSH_LATEST: "true"

View File

@@ -1,18 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ignore_patterns:
- "package-lock.json"
- "go.sum"
- "requirements.txt"

View File

@@ -1 +0,0 @@
../GEMINI.md

13
.github/CODEOWNERS vendored
View File

@@ -3,15 +3,4 @@
# For syntax help see:
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
* @googleapis/senseai-eco-team
# Code & Tests
**/alloydb*/ @googleapis/toolbox-alloydb-team @googleapis/senseai-eco-team
**/bigquery/ @googleapis/toolbox-bigquery-team @googleapis/senseai-eco-team
**/bigtable/ @googleapis/toolbox-bigtable-team @googleapis/senseai-eco-team
**/cloudsqlmssql/ @googleapis/toolbox-cloud-sql-mssql-team @googleapis/senseai-eco-team
**/cloudsqlmysql/ @googleapis/toolbox-cloud-sql-mysql-team @googleapis/senseai-eco-team
**/cloudsqlpg/ @googleapis/toolbox-cloud-sql-postgres-team @googleapis/senseai-eco-team
**/dataplex/ @googleapis/toolbox-dataplex-team @googleapis/senseai-eco-team
**/firestore/ @googleapis/toolbox-firestore-team @googleapis/senseai-eco-team
**/looker/ @googleapis/toolbox-looker-team @googleapis/senseai-eco-team
**/spanner/ @googleapis/toolbox-spanner-team @googleapis/senseai-eco-team
* @googleapis/senseai-eco

View File

@@ -1,22 +1,21 @@
## Description
---
> Should include a concise description of the changes (bug or feature), it's
> impact, along with a summary of the solution
## PR Checklist
---
> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:
- [ ] Make sure you reviewed
[CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
[bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
before writing your code! That way we can discuss the change, evaluate
[bug/issue](https://github.com/googleapis/langchain-google-alloydb-pg-python/issues/new/choose)
before writing your code! That way we can discuss the change, evaluate
designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involve a breaking change
🛠️ Fixes #<issue_number_goes_here>
🛠️ Fixes #<issue_number_goes_here>

View File

@@ -1,106 +1,19 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
assign_issues:
- Yuan325
- duwenxin99
- anubhav756
- twishabansal
- dishaprakash
- averikitsch
assign_issues_by:
- labels:
- 'product: alloydb'
to:
- 'googleapis/toolbox-alloydb-team'
- labels:
- 'product: bigquery'
to:
- 'googleapis/toolbox-bigquery-team'
- labels:
- 'product: bigtable'
to:
- 'googleapis/toolbox-bigtable-team'
- labels:
- 'product: mssql'
to:
- 'googleapis/toolbox-cloud-sql-mssql-team'
- labels:
- 'product: mysql'
to:
- 'googleapis/toolbox-cloud-sql-mysql-team'
- labels:
- 'product: postgres'
to:
- 'googleapis/toolbox-cloud-sql-postgres-team'
- labels:
- 'product: dataplex'
to:
- 'googleapis/toolbox-dataplex-team'
- labels:
- 'product: firestore'
to:
- 'googleapis/toolbox-firestore-team'
- Genesis929
- shobsi
- jiaxunwu
- labels:
- 'product: looker'
to:
- 'googleapis/toolbox-looker-team'
- labels:
- 'product: spanner'
to:
- 'googleapis/toolbox-spanner-team'
- drstrangelooker
assign_prs:
- Yuan325
- duwenxin99
assign_prs_by:
- labels:
- 'product: alloydb'
to:
- 'googleapis/toolbox-alloydb-team'
- labels:
- 'product: bigquery'
to:
- 'googleapis/toolbox-bigquery-team'
- labels:
- 'product: bigtable'
to:
- 'googleapis/toolbox-bigtable-team'
- labels:
- 'product: mssql'
to:
- 'googleapis/toolbox-cloud-sql-mssql-team'
- labels:
- 'product: mysql'
to:
- 'googleapis/toolbox-cloud-sql-mysql-team'
- labels:
- 'product: postgres'
to:
- 'googleapis/toolbox-cloud-sql-postgres-team'
- labels:
- 'product: dataplex'
to:
- 'googleapis/toolbox-dataplex-team'
- labels:
- 'product: firestore'
to:
- 'googleapis/toolbox-firestore-team'
- labels:
- 'product: looker'
to:
- 'googleapis/toolbox-looker-team'
- labels:
- 'product: spanner'
to:
- 'googleapis/toolbox-spanner-team'
- averikitsch

View File

@@ -14,7 +14,6 @@
allowedCopyrightHolders:
- 'Google LLC'
- 'Oracle'
allowedLicenses:
- 'Apache-2.0'
sourceFileExtensions:
@@ -22,6 +21,4 @@ sourceFileExtensions:
- 'yaml'
- 'yml'
ignoreFiles:
- 'docs/en/documentation/**/*.go'
- 'docs/en/samples/**/*'
- '**/*oracle*'
- 'docs/en/getting-started/quickstart/**'

90
.github/labels.yaml vendored
View File

@@ -88,95 +88,11 @@
color: 8befd7
description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'
- name: 'release candidate'
color: 32CD32
description: 'Use label to signal PR should be included in the next release.'
# Product Labels
- name: 'product: alloydb'
color: 5065c7
description: 'AlloyDB'
- name: 'product: bigquery'
color: 5065c7
description: 'BigQuery'
- name: 'product: bigtable'
color: 5065c7
description: 'Bigtable'
- name: 'product: cassandra'
color: 5065c7
description: 'Cassandra'
- name: 'product: clickhouse'
color: 5065c7
description: 'ClickHouse'
- name: 'product: mssql'
color: 5065c7
description: 'SQL Server'
- name: 'product: mysql'
color: 5065c7
description: 'MySQL'
- name: 'product: postgres'
color: 5065c7
description: 'PostgreSQL'
- name: 'product: couchbase'
color: 5065c7
description: 'Couchbase'
- name: 'product: dataplex'
color: 5065c7
description: 'Dataplex'
- name: 'product: dgraph'
color: 5065c7
description: 'Dgraph'
- name: 'product: elasticsearch'
color: 5065c7
description: 'Elasticsearch'
- name: 'product: firebird'
color: 5065c7
description: 'Firebird'
- name: 'product: firestore'
color: 5065c7
description: 'Firestore'
description: 'Product: Assigned to the BigQuery team.'
# Product Labels
- name: 'product: looker'
color: 5065c7
description: 'Looker'
- name: 'product: mindsdb'
color: 5065c7
description: 'MindsDB'
- name: 'product: mongodb'
color: 5065c7
description: 'MongoDB'
- name: 'product: neo4j'
color: 5065c7
description: 'Neo4j'
- name: 'product: oceanbase'
color: 5065c7
description: 'OceanBase'
- name: 'product: oracle'
color: 5065c7
description: 'Oracle'
- name: 'product: redis'
color: 5065c7
description: 'Redis'
- name: 'product: serverlessspark'
color: 5065c7
description: 'Serverless Spark'
- name: 'product: singlestore'
color: 5065c7
description: 'SingleStore'
- name: 'product: spanner'
color: 5065c7
description: 'Spanner'
- name: 'product: sqlite'
color: 5065c7
description: 'SQLite'
- name: 'product: tidb'
color: 5065c7
description: 'TiDB'
- name: 'product: trino'
color: 5065c7
description: 'Trino'
- name: 'product: valkey'
color: 5065c7
description: 'Valkey'
- name: 'product: yugabytedb'
color: 5065c7
description: 'YugabyteDB'
description: 'Product: Assigned to the Looker team.'

View File

@@ -18,35 +18,28 @@ releaseType: simple
versionFile: "cmd/version.txt"
extraFiles: [
"README.md",
"docs/en/documentation/getting-started/colab_quickstart.ipynb",
"docs/en/documentation/introduction/_index.md",
"docs/en/documentation/getting-started/mcp_quickstart/_index.md",
"docs/en/documentation/getting-started/quickstart/shared/configure_toolbox.md",
"docs/en/integrations/alloydb/_index.md",
"docs/en/integrations/alloydb/mcp_quickstart.md",
"docs/en/integrations/alloydb/ai-nl/alloydb_ai_nl.ipynb",
"docs/en/integrations/bigquery/local_quickstart.md",
"docs/en/integrations/bigquery/mcp_quickstart/_index.md",
"docs/en/integrations/bigquery/colab_quickstart_bigquery.ipynb",
"docs/en/integrations/looker/looker_gemini.md",
"docs/en/integrations/looker/looker_gemini_oauth/_index.md",
"docs/en/integrations/looker/looker_mcp_inspector/_index.md",
"docs/en/documentation/connect-to/ides/looker_mcp.md",
"docs/en/documentation/connect-to/ides/mysql_mcp.md",
"docs/en/documentation/connect-to/ides/mssql_mcp.md",
"docs/en/documentation/connect-to/ides/postgres_mcp.md",
"docs/en/documentation/connect-to/ides/neo4j_mcp.md",
"docs/en/documentation/connect-to/ides/sqlite_mcp.md",
"docs/en/documentation/connect-to/ides/oracle_mcp.md",
"gemini-extension.json",
{
"type": "json",
"path": "server.json",
"jsonpath": "$.version"
},
{
"type": "json",
"path": "server.json",
"jsonpath": "$.packages[0].identifier"
},
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/getting-started/introduction/_index.md",
"docs/en/getting-started/local_quickstart.md",
"docs/en/getting-started/local_quickstart_js.md",
"docs/en/getting-started/local_quickstart_go.md",
"docs/en/getting-started/mcp_quickstart/_index.md",
"docs/en/samples/alloydb/_index.md",
"docs/en/samples/bigquery/local_quickstart.md",
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
"docs/en/samples/looker/looker_gemini.md",
"docs/en/samples/looker/looker_mcp_inspector.md",
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
"docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
"docs/en/how-to/connect-ide/bigquery_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/firestore_mcp.md",
"docs/en/how-to/connect-ide/looker_mcp.md",
"docs/en/how-to/connect-ide/mysql_mcp.md",
"docs/en/how-to/connect-ide/mssql_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/spanner_mcp.md",
]

View File

@@ -8,35 +8,15 @@
':prHourlyLimitNone',
':preserveSemverRanges',
],
minimumReleaseAge: '3 days',
minimumReleaseAge: '3',
rebaseWhen: 'conflicted',
dependencyDashboardLabels: [
'type: process',
],
postUpdateOptions: [
'gomodTidy',
],
customManagers: [
{
customType: 'regex',
managerFilePatterns: [
'/^\\.github/workflows/.*\\.ya?ml$/',
],
matchStrings: [
'hugo-version:\\s*["\']?(?<currentValue>\\d+\\.\\d+\\.\\d+)["\']?',
],
depNameTemplate: 'gohugoio/hugo',
datasourceTemplate: 'github-releases',
extractVersionTemplate: '^v?(?<version>.*)$',
},
"postUpdateOptions": [
"gomodTidy"
],
packageRules: [
{
groupName: 'Hugo',
matchPackageNames: [
'gohugoio/hugo',
],
},
{
groupName: 'GitHub Actions',
matchManagers: [
@@ -44,37 +24,5 @@
],
pinDigests: true,
},
{
groupName: 'Go',
matchManagers: [
'gomod',
],
ignorePaths: [
'docs/**',
'.hugo/**',
],
},
{
groupName: 'Go Samples',
matchManagers: [
'gomod',
],
matchFileNames: [
'docs/**',
'.hugo/**',
],
},
{
groupName: 'Node',
matchManagers: [
'npm',
],
},
{
groupName: 'Pip',
matchManagers: [
'pip_requirements',
],
},
],
}

44
.github/sync-repo-settings.yaml vendored Normal file
View File

@@ -0,0 +1,44 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Synchronize repository settings from a centralized config
# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
# Install: https://github.com/apps/sync-repo-settings
# Disable merge commits
rebaseMergeAllowed: true
squashMergeAllowed: true
mergeCommitAllowed: false
# Enable branch protection
branchProtectionRules:
- pattern: main
isAdminEnforced: true
requiredStatusCheckContexts:
- "cla/google"
- "lint"
- "conventionalcommits.org"
- "header-check"
# - Add required status checks like presubmit tests
- "unit tests (ubuntu-latest)"
- "unit tests (windows-latest)"
- "unit tests (macos-latest)"
- "integration-test-pr (toolbox-testing-438616)"
requiredApprovingReviewCount: 1
requiresCodeOwnerReviews: true
requiresStrictStatusChecks: true
# Set team access
permissionRules:
- team: senseai-eco
permission: admin

View File

@@ -1,27 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trigger presubmit tests for trusted contributors
# https://github.com/googleapis/repo-automation-bots/tree/main/packages/trusted-contribution
# Install: https://github.com/apps/trusted-contributions-gcf
trustedContributors:
- "dependabot[bot]"
- "renovate-bot"
annotations:
# Trigger Cloud Build tests
- type: comment
text: "/gcbrun"
- type: label
text: "tests: run"

View File

@@ -37,7 +37,7 @@ jobs:
runs-on: 'ubuntu-latest'
steps:
- uses: 'actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd' # v8
- uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' # v7
with:
script: |-
// parse test names

View File

@@ -1,45 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "Sync v1 docsite to Cloudflare"
permissions:
contents: read
concurrency:
group: cloudflare-sync
cancel-in-progress: true
on:
workflow_run:
workflows: ["CF: Deploy Dev Docs", "CF: Deploy Versioned Docs", "CF: Deploy Previous Version Docs"]
types: [completed]
jobs:
deploy:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: 'cloudflare-pages'
- name: Cleanup
run: |
rm -rf .git
- name: Cloudflare Deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy . --project-name=toolbox-docs --branch=main

View File

@@ -1,137 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Deploy Previous Version Docs"
on:
workflow_dispatch:
inputs:
version_tag:
description: 'The old version tag to build docs for (e.g., v0.15.0)'
required: true
type: string
jobs:
build_and_deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout Tag (Code + Content)
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.inputs.version_tag }}
submodules: 'recursive'
fetch-depth: 0
- name: Apply Backports and Fixes
run: |
# 1. Fetch main to get the compressed GIF
git fetch origin main
git show origin/main:docs/en/documentation/configuration/toolbox-ui/edit-headers.gif > docs/en/how-to/toolbox-ui/edit-headers.gif
sed -i 's|/genai-toolbox/|/|g' .hugo/layouts/partials/navbar-version-selector.html
sed -i 's|https://googleapis.github.io/genai-toolbox|https://mcp-toolbox.dev|g' .hugo/hugo.toml
git show origin/main:.hugo/hugo.cloudflare.toml > modern_config.toml
# Graft the future version list into the old config using Node.js!
node -e "
const fs = require('fs');
const modern = fs.readFileSync('modern_config.toml', 'utf8');
let old = fs.readFileSync('.hugo/hugo.toml', 'utf8');
// Convert any old GitHub URLs to Cloudflare in the rest of the config
old = old.replace(/https:\/\/googleapis\.github\.io\/genai-toolbox/g, 'https://mcp-toolbox.dev');
// Erase the outdated version list from the old config
old = old.replace(/\[\[params\.versions\]\][\s\S]*?(?=\n\[\[|\n\[|$)/g, '');
// Extract the fully updated version list from the modern config
const modernVersions = modern.match(/\[\[params\.versions\]\][\s\S]*?(?=\n\[\[|\n\[|$)/g);
// Inject the modern versions into the old config
if (modernVersions) {
fs.writeFileSync('.hugo/hugo.toml', old + '\n\n' + modernVersions.join('\n'));
} else {
fs.writeFileSync('.hugo/hugo.toml', old);
}
"
- name: Setup Hugo and Node
uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
with:
hugo-version: "0.145.0"
extended: true
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "22"
- name: Install Dependencies
run: npm ci
working-directory: .hugo
- name: Build Hugo Site for Archived Version
run: |
hugo --minify
rm -f public/releases.releases
working-directory: .hugo
env:
HUGO_BASEURL: https://mcp-toolbox.dev/${{ github.event.inputs.version_tag }}/
HUGO_RELATIVEURLS: false
HUGO_PARAMS_VERSION: ${{ github.event.inputs.version_tag }}
- name: Build Pagefind Index (Archived Version)
run: npx pagefind --site public
working-directory: .hugo
- name: Deploy to cloudflare-pages
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .hugo/public
publish_branch: cloudflare-pages
destination_dir: ./${{ github.event.inputs.version_tag }}
keep_files: true
allow_empty_commit: true
commit_message: "docs(backport): deploy docs for ${{ github.event.inputs.version_tag }}"
- name: Clean Build Directory
run: rm -rf .hugo/public
- name: Build Hugo Site
run: hugo --minify
working-directory: .hugo
env:
HUGO_BASEURL: https://mcp-toolbox.dev/
HUGO_RELATIVEURLS: false
HUGO_PARAMS_VERSION: ${{ github.event.inputs.version_tag }}
- name: Build Pagefind Index (Root)
run: npx pagefind --site public
working-directory: .hugo
- name: Deploy to root
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .hugo/public
publish_branch: cloudflare-pages
keep_files: true
allow_empty_commit: true
commit_message: "deploy: docs to root for ${{ github.event.inputs.version_tag }}"

View File

@@ -1,105 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Deploy Versioned Docs"
permissions:
contents: write
on:
release:
types: [published]
jobs:
deploy:
runs-on: ubuntu-24.04
# This shared concurrency group ensures only one docs deployment runs at a time.
concurrency:
group: cf-docs-update
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.release.tag_name }}
- name: Get Version from Release Tag
id: get_version
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
run: echo "VERSION=${RELEASE_TAG}" >> "$GITHUB_OUTPUT"
- name: Setup Hugo
uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
with:
hugo-version: "0.145.0"
extended: true
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "22"
- name: Install Dependencies
run: npm ci
working-directory: .hugo
- name: Build Hugo Site
run: |
hugo --minify --config hugo.cloudflare.toml
rm -f public/releases.releases
working-directory: .hugo
env:
HUGO_BASEURL: https://mcp-toolbox.dev/${{ steps.get_version.outputs.VERSION }}/
HUGO_RELATIVEURLS: false
HUGO_PARAMS_VERSION: ${{ steps.get_version.outputs.VERSION }}
- name: Build Pagefind Index (Versioned)
run: npx pagefind --site public
working-directory: .hugo
- name: Deploy
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .hugo/public
publish_branch: cloudflare-pages
destination_dir: ./${{ steps.get_version.outputs.VERSION }}
keep_files: true
commit_message: "deploy: docs for ${{ steps.get_version.outputs.VERSION }}"
- name: Clean Build Directory
run: rm -rf .hugo/public
- name: Build Hugo Site
run: hugo --minify --config hugo.cloudflare.toml
working-directory: .hugo
env:
HUGO_BASEURL: https://mcp-toolbox.dev/
HUGO_RELATIVEURLS: false
HUGO_PARAMS_VERSION: ${{ steps.get_version.outputs.VERSION }}
- name: Build Pagefind Index (Root)
run: npx pagefind --site public
working-directory: .hugo
- name: Deploy to root
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .hugo/public
publish_branch: cloudflare-pages
keep_files: true
allow_empty_commit: true
commit_message: "deploy: docs to root for ${{ steps.get_version.outputs.VERSION }}"

View File

@@ -1,10 +1,10 @@
# Copyright 2026 Google LLC
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Deploy Dev Docs"
name: "docs"
permissions:
contents: write
@@ -23,8 +23,7 @@ on:
- main
paths:
- 'docs/**'
- '.github/workflows/docs*_cf.yaml'
- '.github/workflows/deploy*_cf.yaml'
- 'github/workflows/docs**'
- '.hugo/**'
# Allow triggering manually.
@@ -36,15 +35,13 @@ jobs:
defaults:
run:
working-directory: .hugo
# This shared concurrency group ensures only one docs deployment runs at a time.
concurrency:
group: cf-docs-update
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
with:
fetch-depth: 0
submodules: recursive
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
- name: Setup Hugo
uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
@@ -53,12 +50,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -66,25 +63,17 @@ jobs:
${{ runner.os }}-node-
- run: npm ci
- run: hugo --minify --config hugo.cloudflare.toml
- run: hugo --minify
env:
HUGO_BASEURL: https://mcp-toolbox.dev/dev/
HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/
HUGO_RELATIVEURLS: false
- name: Build Pagefind Search Index
run: npx pagefind --site public
- name: Create Staging Directory
run: |
mkdir staging
mv public staging/dev
mv staging/dev/releases.releases staging/releases.releases
- name: Push to Cloudflare Branch
- name: Deploy
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./.hugo/staging
publish_branch: cloudflare-pages
publish_dir: .hugo/public
# Do not delete previews on each production deploy.
# CSS or JS changes will require manual clean-up.
keep_files: true
commit_message: "deploy: ${{ github.event.head_commit.message }}"

View File

@@ -1,56 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Lint Documentation
permissions:
contents: read
on:
pull_request:
paths:
- 'docs/**'
- '.github/workflows/docs**'
- '.ci/lint-docs-*.sh'
jobs:
lint-source-pages:
name: Lint Documentation
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: '3.x'
- name: Check for large files (>24MB)
run: |
LARGE_FILES=$(find docs/ -type f -size +24M)
if [ -n "$LARGE_FILES" ]; then
echo "Error: Files exceed 24MB limit: $LARGE_FILES"
exit 1
fi
- name: Make scripts executable
run: chmod +x .ci/lint-docs-*.sh
- name: Run Structure Linter for Source Pages
run: bash .ci/lint-docs-source-page.sh
- name: Run Structure Linter for Tool Pages
run: bash .ci/lint-docs-tool-page.sh

View File

@@ -1,114 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Build Docs Preview"
on:
pull_request:
types: [opened, synchronize, reopened, labeled]
paths:
- 'docs/**'
- '.github/workflows/docs*_cf.yaml'
- '.hugo/**'
permissions: read-all
jobs:
build-preview:
if: "contains(github.event.pull_request.labels.*.name, 'docs: deploy-preview')"
runs-on: ubuntu-24.04
env:
PR_NUMBER: ${{ github.event.number }}
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
concurrency:
group: "cf-preview-${{ github.event.number }}"
cancel-in-progress: true
defaults:
run:
working-directory: .hugo
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ env.HEAD_SHA }}
fetch-depth: 0
- name: Setup Hugo
uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
with:
hugo-version: "0.145.0"
extended: true
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "22"
cache: 'npm'
cache-dependency-path: '.hugo/package-lock.json'
- run: npm ci --ignore-scripts
- run: hugo --minify --config hugo.cloudflare.toml
env:
HUGO_BASEURL: "/"
HUGO_ENVIRONMENT: preview
HUGO_RELATIVEURLS: false
- name: Build Pagefind Search Index
run: npx pagefind --site public
- name: Prepare Artifact Payload
run: |
mkdir -p ../artifact-payload
cp -r public ../artifact-payload/public
echo ${{ env.PR_NUMBER }} > ../artifact-payload/pr_number.txt
- name: Upload Artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cf-preview-data
path: artifact-payload/
retention-days: 1
- name: Deployment Link
run: |
DEPLOY_URL="https://github.com/${{ github.repository_owner }}/${{ github.event.repository.name }}/actions/workflows/docs_deploy_cf.yaml"
echo "### Build Complete" >> $GITHUB_STEP_SUMMARY
echo "The build for PR #${{ env.PR_NUMBER }} succeeded." >> $GITHUB_STEP_SUMMARY
echo "The Cloudflare deployment workflow is now starting." >> $GITHUB_STEP_SUMMARY
echo "---" >> $GITHUB_STEP_SUMMARY
echo "#### [Track Deployment Progress]($DEPLOY_URL)" >> $GITHUB_STEP_SUMMARY
remove-label:
needs: build-preview
if: "always() && contains(github.event.pull_request.labels.*.name, 'docs: deploy-preview')"
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- name: Remove deploy-preview label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
name: 'docs: deploy-preview'
});
console.log("Label 'docs: deploy-preview' removed successfully.");
} catch (error) {
console.log(`Error removing label: ${error}`);
}

View File

@@ -0,0 +1,59 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Removes the staged docs preview (previews/PR-<number>/ on the gh-pages
# branch) when a pull request is closed, then leaves a comment on the PR.
name: "docs"
permissions:
  contents: write
  pull-requests: write
# This Workflow depends on 'github.event.number',
# not compatible with branch or manual triggers.
on:
  pull_request:
    types:
      - closed
jobs:
  clean:
    # Forks cannot have staged previews here, so skip them.
    if: ${{ !github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-24.04
    concurrency:
      # Shared concurrency group with preview staging.
      group: "preview-${{ github.event.number }}"
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          ref: gh-pages
      - name: Remove Preview
        run: |
          # Only commit when a preview was actually staged for this PR:
          # otherwise 'git add -u <pathspec>' fails on an unmatched pathspec
          # and 'git commit' would fail with nothing staged, failing the job.
          if [ -d "./previews/PR-${{ github.event.number }}" ]; then
            rm -Rf ./previews/PR-${{ github.event.number }}
            git config user.name 'github-actions[bot]'
            git config user.email 'github-actions[bot]@users.noreply.github.com'
            git add -u previews/PR-${{ github.event.number }}
            git commit --message "cleanup: previews/PR-${{ github.event.number }}"
            git push
          else
            echo "No staged preview for PR-${{ github.event.number }}; nothing to clean up."
          fi
      - name: Comment
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.payload.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: "🧨 Preview deployments removed."
            })

View File

@@ -1,73 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Cleanup PR Preview"
permissions:
pull-requests: write
# This Workflow depends on 'github.event.number',
# not compatible with branch or manual triggers.
on:
pull_request:
types:
- closed
jobs:
clean:
# Only run for PRs from the same repository to ensure secret access
if: "${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}"
runs-on: ubuntu-24.04
concurrency:
# Shared concurrency group with preview staging.
group: "cf-preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- name: Delete Cloudflare Pages Deployments via API
env:
ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
PROJECT_NAME: toolbox-docs
BRANCH_NAME: pr-${{ github.event.number }}
run: |
echo "Fetching deployments for preview branch: $BRANCH_NAME"
# Fetch the most recent deployments from your Cloudflare project
RESPONSE=$(curl -s -X GET "https://api.cloudflare.com/client/v4/accounts/$ACCOUNT_ID/pages/projects/$PROJECT_NAME/deployments" \
-H "Authorization: Bearer $API_TOKEN")
# Use 'jq' to extract all deployment IDs that match this specific PR branch alias
IDS=$(echo "$RESPONSE" | jq -r --arg branch "$BRANCH_NAME" '.result[] | select(.deployment_trigger.metadata.branch? == $branch) | .id')
if [ -z "$IDS" ]; then
echo "No preview deployments found to clean up."
else
for id in $IDS; do
echo "Deleting Cloudflare deployment ID: $id"
curl -s -X DELETE "https://api.cloudflare.com/client/v4/accounts/$ACCOUNT_ID/pages/projects/$PROJECT_NAME/deployments/$id?force=true" \
-H "Authorization: Bearer $API_TOKEN"
done
echo "Successfully removed preview environment."
fi
- name: Comment
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
github.rest.issues.createComment({
issue_number: context.payload.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "🧨 **Preview deployments removed.**\n\nCloudflare Pages environments for `pr-${{ github.event.number }}` have been deleted."
})

View File

@@ -0,0 +1,101 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds the Hugo docs site for a pull request and stages it under
# previews/PR-<number>/ on the gh-pages branch, then comments the preview URL.
name: "docs"
permissions:
  contents: write
  pull-requests: write
# This Workflow depends on 'github.event.number',
# not compatible with branch or manual triggers.
on:
  pull_request:
    # Sync with github_actions_preview_fallback.yml on.pull_request.paths-ignore
    paths:
      - 'docs/**'
      # Must start with '.github/' — a bare 'github/workflows/...' glob never
      # matches, so workflow changes would not retrigger the preview build.
      - '.github/workflows/docs**'
      - '.hugo/**'
  pull_request_target:
    types: [labeled]
    paths:
      - 'docs/**'
      - '.github/workflows/docs**'
      - '.hugo/**'
jobs:
  preview:
    # run job on proper workflow event triggers (skip job for pull_request event
    # from forks and only run pull_request_target for "docs: deploy-preview"
    # label)
    if: "${{ (github.event.action != 'labeled' && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) || github.event.label.name == 'docs: deploy-preview' }}"
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: .hugo
    concurrency:
      # Shared concurrency group with preview cleanup.
      group: "preview-${{ github.event.number }}"
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          # Checkout the PR's HEAD commit (supports forks).
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
      - name: Setup Hugo
        uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
        with:
          hugo-version: "0.145.0"
          extended: true
      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "22"
      - name: Cache dependencies
        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - run: npm ci
      - run: hugo --minify
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/
          HUGO_ENVIRONMENT: preview
          HUGO_RELATIVEURLS: false
      - name: Deploy
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          destination_dir: ./previews/PR-${{ github.event.number }}
          commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"
      - name: Comment
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.payload.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: "🔎 Preview at https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/"
            })

View File

@@ -1,110 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: "CF: Deploy Docs Preview"
on:
workflow_run:
workflows: ["CF: Build Docs Preview"]
types:
- completed
workflow_dispatch:
inputs:
pr_number:
description: 'PR Number to deploy (Manual override)'
required: true
type: string
build_run_id:
description: 'The Run ID from the successful "CF: Build Docs Preview" workflow'
required: true
type: string
permissions:
contents: read
pull-requests: write
jobs:
deploy-preview:
if: >
github.event_name == 'workflow_dispatch' ||
(github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success')
runs-on: ubuntu-24.04
concurrency:
group: "cf-deploy-${{ github.event.inputs.pr_number || github.event.workflow_run.pull_requests[0].number }}"
cancel-in-progress: true
steps:
- name: Checkout base repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Download Artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: cf-preview-data
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.inputs.build_run_id || github.event.workflow_run.id }}
path: downloaded-artifact
- name: Read PR Number
id: get_pr
run: |
if [ -n "${{ github.event.inputs.pr_number }}" ]; then
PR_NUMBER="${{ github.event.inputs.pr_number }}"
else
PR_NUMBER=$(cat downloaded-artifact/pr_number.txt)
fi
if ! [[ "$PR_NUMBER" =~ ^[0-9]+$ ]]; then
echo "Error: PR number [$PR_NUMBER] is invalid."
exit 1
fi
echo "pr_number=$PR_NUMBER" >> "$GITHUB_OUTPUT"
- name: Deploy to Cloudflare Pages
id: cf_deploy
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy downloaded-artifact/public --project-name toolbox-docs --branch pr-${{ steps.get_pr.outputs.pr_number }}
- name: Post Preview URL Comment
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const prNumber = parseInt('${{ steps.get_pr.outputs.pr_number }}', 10);
const deployUrl = '${{ steps.cf_deploy.outputs.pages-deployment-alias-url }}';
const marker = '<!-- cf-preview-comment-marker -->';
// Fetch all comments on the PR
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
});
// Look for the invisible HTML marker
const existingComment = comments.find(c => c.body.includes(marker));
// Exit early if we've already posted a comment for this PR to avoid duplicates
if (existingComment) {
console.log("Preview link already posted on this PR. Skipping.");
return;
}
// Create the comment since it's the first deployment for this PR
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: `${marker}\n🚀 **Cloudflare Preview Ready!**\n\n🔎 View Preview: ${deployUrl}\n\n*(Note: Subsequent pushes to this PR will automatically update the preview at this same URL)*`
});

View File

@@ -1,104 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Link Checker
on:
pull_request:
permissions:
contents: read
jobs:
link-check:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- name: Identify Changed Files
id: changed-files
shell: bash
run: |
git fetch origin main
CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT origin/main...HEAD -- '*.md')
if [ -z "$CHANGED_FILES" ]; then
echo "No markdown files changed. Skipping checks."
echo "HAS_CHANGES=false" >> "$GITHUB_OUTPUT"
else
echo "--- Changed Files to Scan ---"
echo "$CHANGED_FILES"
echo "-----------------------------"
FILES_QUOTED=$(echo "$CHANGED_FILES" | sed 's/^/"/;s/$/"/' | tr '\n' ' ')
# Use EOF to write multiline or long strings to GITHUB_OUTPUT
echo "HAS_CHANGES=true" >> "$GITHUB_OUTPUT"
echo "CHECK_FILES<<EOF" >> "$GITHUB_OUTPUT"
echo "$FILES_QUOTED" >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
fi
- name: Restore lychee cache
if: steps.changed-files.outputs.HAS_CHANGES == 'true'
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: .lycheecache
key: cache-lychee-${{ github.sha }}
restore-keys: cache-lychee-
- name: Link Checker
id: lychee-check
if: steps.changed-files.outputs.HAS_CHANGES == 'true'
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
continue-on-error: true
with:
args: >
--quiet
--no-progress
--cache
--max-cache-age 1d
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
--max-retries 10
${{ steps.changed-files.outputs.CHECK_FILES }}
output: lychee-report.md
format: markdown
fail: true
jobSummary: false
debug: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Prepare Report
if: steps.changed-files.outputs.HAS_CHANGES == 'true' && steps.lychee-check.outcome == 'failure'
run: |
echo "## Link Resolution Note" > full-report.md
echo "Local links and directory changes work differently on GitHub than on the docsite. You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> full-report.md
echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> full-report.md
echo "" >> full-report.md
sed -E '/(Redirect|Redirects per input)/d' lychee-report.md >> full-report.md
- name: Display Failure Report
# Run this ONLY if the link checker failed
if: steps.lychee-check.outcome == 'failure'
run: |
# We can now simply output the prepared file to the job summary
cat full-report.md >> $GITHUB_STEP_SUMMARY
# Fail the job
exit 1

View File

@@ -1,74 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Link Checks
on:
schedule:
- cron: '0 0 * * 1'
jobs:
linkChecker:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- name: Checkout Repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Link Checker
id: lychee-check
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
continue-on-error: true
with:
args: >
--quiet
--no-progress
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
README.md
docs/
output: lychee-report.md
format: markdown
fail: true
jobSummary: false
debug: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Prepare Report
run: |
echo "## Link Resolution Note" > full-report.md
echo "Local links and directory changes work differently on GitHub than on the docsite.You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> full-report.md
echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> full-report.md
echo "" >> full-report.md
sed -E '/(Redirect|Redirects per input)/d' lychee-report.md >> full-report.md
- name: Create Issue From File
if: steps.lychee-check.outcome == 'failure'
uses: peter-evans/create-issue-from-file@fca9117c27cdc29c6c4db3b86c48e4115a786710 # v6
with:
title: Link Checker Report
content-filepath: full-report.md
labels: |
priority: p2
type: process
- name: Display Failure Report
# Run this ONLY if the link checker failed
if: steps.lychee-check.outcome == 'failure'
run: |
# We can now simply output the prepared file to the job summary
cat full-report.md >> $GITHUB_STEP_SUMMARY
# Fail the job
exit 1

View File

@@ -15,18 +15,15 @@
name: lint
on:
pull_request:
paths:
- "**"
- "!docs/**"
- "!**.md"
- "!.github/**"
- ".github/workflows/lint.yaml"
pull_request_target:
types: [labeled]
# Declare default permissions as read only.
permissions: read-all
jobs:
lint:
if: "${{ github.event.action != 'labeled' || github.event.label.name == 'tests: run' }}"
name: lint
runs-on: ubuntu-latest
concurrency:
@@ -34,23 +31,42 @@ jobs:
cancel-in-progress: true
permissions:
contents: 'read'
issues: 'write'
pull-requests: 'write'
steps:
- name: Setup Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
- name: Remove PR Label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
go-version: "1.25"
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
name: 'tests: run',
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.pull_request.number
});
} catch (e) {
console.log('Failed to remove label. Another job may have already removed it!');
}
- name: Setup Go
uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
with:
go-version: "1.22"
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
token: ${{ secrets.GITHUB_TOKEN }}
- name: >
Verify go mod tidy. If you're reading this and the check has
failed, run `goimports -w . && go mod tidy && golangci-lint run`
run: |
go mod tidy && git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
with:
version: latest
args: --timeout 10m
args: --timeout 3m

View File

@@ -1,41 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: lint
on:
pull_request:
paths:
- "docs/**"
- "**.md"
- ".github/**"
- "!.github/workflows/lint.yaml"
pull_request_target:
paths:
- "docs/**"
- "**.md"
- ".github/**"
- "!.github/workflows/lint.yaml"
permissions: read-all
jobs:
lint:
name: lint
runs-on: ubuntu-latest
steps:
- name: Skip Lint
run: |
echo "Skipping lint for documentation/config-only changes."
echo "This job exists to satisfy the required status check."

View File

@@ -1,73 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Publish to MCP Registry
on:
push:
tags: ["v*"] # Triggers on version tags like v1.0.0
# allow manual triggering with no inputs required
workflow_dispatch:
jobs:
publish:
runs-on: ubuntu-latest
permissions:
id-token: write # Required for OIDC authentication
contents: read
steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Wait for image in Artifact Registry
shell: bash
run: |
MAX_ATTEMPTS=10
VERSION=$(jq -r '.version' server.json)
REGISTRY_URL="https://us-central1-docker.pkg.dev/v2/database-toolbox/toolbox/toolbox/manifests/${VERSION}"
# initially sleep time to wait for the version release
sleep 3m
for i in $(seq 1 ${MAX_ATTEMPTS}); do
echo "Attempt $i: Checking for image ${REGISTRY_URL}..."
# Use curl to check the manifest header
# Using -I to fetch headers only, -s silent, -f fail fast on errors.
curl -Isf "${REGISTRY_URL}" > /dev/null
if [ $? -eq 0 ]; then
echo "✅ Image found! Continuing to next steps."
exit 0
else
echo "❌ Image not found (likely 404 error) on attempt $i."
if [ $i -lt ${MAX_ATTEMPTS} ]; then
echo "Sleeping for 5 minutes before next attempt..."
sleep 2m
else
echo "Maximum attempts reached. Image not found."
exit 1
fi
fi
done
- name: Install MCP Publisher
run: |
curl -L "https://github.com/modelcontextprotocol/registry/releases/latest/download/mcp-publisher_$(uname -s | tr '[:upper:]' '[:lower:]')_$(uname -m | sed 's/x86_64/amd64/;s/aarch64/arm64/').tar.gz" | tar xz mcp-publisher
- name: Login to MCP Registry
run: ./mcp-publisher login github-oidc
- name: Publish to MCP Registry
run: ./mcp-publisher publish

View File

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -17,25 +17,17 @@ on:
push:
branches:
- "main"
paths:
- "**"
- "!docs/**"
- "!**.md"
- "!.github/**"
- ".github/workflows/tests.yaml"
pull_request:
paths:
- "**"
- "!docs/**"
- "!**.md"
- "!.github/**"
- ".github/workflows/tests.yaml"
pull_request_target:
types: [labeled]
# Declare default permissions as read only.
permissions: read-all
jobs:
integration:
# run job on proper workflow event triggers (skip job for pull_request event from forks and only run pull_request_target for "tests: run" label)
if: "${{ (github.event.action != 'labeled' && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) || github.event.label.name == 'tests: run' }}"
name: unit tests
runs-on: ${{ matrix.os }}
strategy:
@@ -44,14 +36,37 @@ jobs:
fail-fast: false
permissions:
contents: "read"
issues: "write"
pull-requests: "write"
steps:
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Remove PR label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
name: 'tests: run',
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.pull_request.number
});
} catch (e) {
console.log('Failed to remove label. Another job may have already removed it!');
}
- name: Setup Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
with:
go-version-file: 'go.mod'
go-version: "1.22"
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: go get .
@@ -66,12 +81,11 @@ jobs:
run: |
source_dir="./internal/sources/*"
tool_dir="./internal/tools/*"
prompt_dir="./internal/prompts/*"
auth_dir="./internal/auth/*"
int_test_dir="./tests/*"
included_packages=$(go list ./... | grep -v -e "$source_dir" -e "$tool_dir" -e "$prompt_dir" -e "$auth_dir" -e "$int_test_dir")
included_packages=$(go list ./... | grep -v -e "$source_dir" -e "$tool_dir" -e "$auth_dir" -e "$int_test_dir")
go test -race -cover -coverprofile=coverage.out -v $included_packages
go test -race -v ./internal/sources/... ./internal/tools/... ./internal/prompts/... ./internal/auth/...
go test -race -v ./internal/sources/... ./internal/tools/... ./internal/auth/...
- name: Run tests without coverage
if: ${{ runner.os != 'Linux' }}

View File

@@ -1,53 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: tests
on:
push:
branches:
- "main"
paths:
- "docs/**"
- "**.md"
- ".github/**"
- "!.github/workflows/tests.yaml"
pull_request:
paths:
- "docs/**"
- "**.md"
- ".github/**"
- "!.github/workflows/tests.yaml"
pull_request_target:
types: [labeled]
paths:
- "docs/**"
- "**.md"
- ".github/**"
- "!.github/workflows/tests.yaml"
permissions: read-all
jobs:
integration:
name: unit tests
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- name: Skip Tests
run: |
echo "Skipping unit tests for documentation/config-only changes."
echo "This job exists to satisfy the required status check."

5
.gitignore vendored
View File

@@ -13,16 +13,11 @@ node_modules
# hugo
.hugo/public/
.hugo/resources/_gen
.hugo/static/pagefind/
.hugo_build.lock
# coverage
.coverage
# python
__pycache__/
# executable
genai-toolbox
toolbox
toolbox.exe

View File

@@ -23,6 +23,8 @@ linters:
exclusions:
presets:
- std-error-handling
issues:
fix: true
formatters:
enable:
- goimports

View File

@@ -1,22 +1 @@
/* ==========================================================================
PROJECT SCSS HUB
Custom theme overrides and UI components.
========================================================================== */
@import 'components/typography';
@import 'components/layout';
@import 'components/header';
@import 'components/sidebar';
@import 'components/callouts';
@import 'components/secondary_nav';
@import 'components/search';
@import 'td/code-dark';
// Make tabs scrollable horizontally instead of wrapping
.nav-tabs {
flex-wrap: nowrap;
white-space: nowrap;
overflow-x: auto;
overflow-y: hidden;
}
@import 'td/code-dark';

View File

@@ -1,26 +0,0 @@
/* ==========================================================================
CALLOUTS & NOTICES
Styling for alerts, tips, and admonition blocks.
========================================================================== */
.td-content .alert code,
.td-content .notice code,
.td-content .admonition code {
background-color: #ffffff !important;
color: rgba(32, 33, 36, 0.95) !important;
padding: 0.2rem 0.4rem !important;
border-radius: 4px !important;
border: 1px solid rgba(0, 0, 0, 0.05) !important;
font-weight: 500 !important;
}
html[data-bs-theme="dark"] .td-content .alert code,
body.dark .td-content .alert code,
html[data-bs-theme="dark"] .td-content .notice code,
body.dark .td-content .notice code,
html[data-bs-theme="dark"] .td-content .admonition code,
body.dark .td-content .admonition code {
background-color: rgba(255, 255, 255, 0.15) !important;
color: #ffffff !important;
border-color: rgba(255, 255, 255, 0.1) !important;
}

View File

@@ -1,213 +0,0 @@
/* ==========================================================================
HEADER & NAVIGATION
Primary navbar, secondary tabbed nav, utilities, and mobile layouts.
========================================================================== */
/* Main Header Structure */
header { position: sticky !important; top: 0; z-index: 1060; width: 100%; background-color: var(--bs-body-bg, #ffffff); transform: translateZ(0); }
header, .td-navbar { z-index: 1060 !important; }
.td-navbar {
position: relative !important; width: 100% !important;
/* Header Utility Buttons */
.navbar-nav {
gap: 0.35rem; align-items: center;
li.td-light-dark-menu > button,
li.nav-item:has(a[href*="github"]) > a,
li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a {
background-color: transparent !important; background-image: none !important; border: 1.5px solid transparent !important;
box-shadow: none !important; outline: none !important; color: rgba(255, 255, 255, 0.85) !important;
font-weight: 500 !important; font-size: 0.95rem !important; text-decoration: none !important; white-space: nowrap;
display: flex !important; align-items: center !important; justify-content: center !important;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1) !important;
i, svg { font-size: 1.15rem !important; fill: rgba(255, 255, 255, 0.85) !important; transition: all 0.2s ease-in-out !important; }
&:hover {
background-color: #ffffff !important; color: $primary !important;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.1), 0 0 0 2px rgba(255, 255, 255, 0.2) !important;
transform: translateY(-1px) !important; opacity: 1;
i, svg { fill: $primary !important; opacity: 1; }
}
&:active { transform: translateY(0px) !important; background-color: rgba(255, 255, 255, 0.15) !important; }
}
li.nav-item:has(a[href*="github"]) > a,
li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a {
border-radius: 50px !important; padding: 0.35rem 1.1rem !important;
i, svg { margin-right: 0.4rem !important; }
}
li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a.dropdown-toggle::after {
display: inline-block !important; margin-left: 0.5rem !important; vertical-align: middle !important;
border-top: 0.4em solid; border-right: 0.4em solid transparent; border-bottom: 0; border-left: 0.4em solid transparent;
transition: transform 0.2s ease;
}
li.nav-item.show > a.dropdown-toggle::after,
a.dropdown-toggle.show::after { transform: rotate(180deg); }
li.td-light-dark-menu > button {
width: 38px !important; height: 38px !important; padding: 0 !important; border-radius: 50% !important;
&::after { display: none !important; }
i, svg { margin: 0 !important; width: 1.15rem !important; height: 1.15rem !important; }
}
}
/* Universal Dropdowns */
.dropdown-menu {
z-index: 1065 !important; margin-top: 0.6rem !important; background-color: #ffffff !important;
border: 1px solid rgba(0, 0, 0, 0.08) !important; border-radius: 12px !important;
box-shadow: 0 12px 34px rgba(0, 0, 0, 0.15) !important; padding: 0.6rem !important;
min-width: 200px !important; max-height: 60vh !important; overflow-y: auto !important;
scrollbar-width: thin; scrollbar-color: rgba(0, 0, 0, 0.2) rgba(0, 0, 0, 0.05);
animation: dropdownFadeIn 0.25s cubic-bezier(0.16, 1, 0.3, 1) forwards !important; transform-origin: top right;
&::-webkit-scrollbar { width: 8px; display: block !important; }
&::-webkit-scrollbar-track { background: rgba(0, 0, 0, 0.05) !important; border-radius: 10px; margin: 5px; }
&::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.2) !important; border-radius: 10px; border: 2px solid transparent; background-clip: content-box;
&:hover { background: rgba(0, 0, 0, 0.35) !important; background-clip: content-box; }
}
}
.dropdown-item {
border-radius: 8px !important; padding: 0.6rem 1rem !important; color: #495057 !important;
margin-bottom: 4px !important; transition: all 0.2s ease !important; display: flex !important;
align-items: center !important; font-weight: 500 !important;
i, svg { margin-right: 12px !important; width: 16px !important; text-align: center; }
&:hover, &:focus {
background-color: rgba(68, 132, 244, 0.08) !important; color: #4484f4 !important; transform: translateX(4px) !important;
}
&:not(.active-version):not(.active):not(:has(i)):not(:has(svg)) {
opacity: 0.75 !important; font-weight: 400 !important; transition: opacity 0.2s ease !important;
&:hover { opacity: 1 !important; }
}
&.active-version, &.active {
background-color: rgba(68, 132, 244, 0.08) !important; color: #4484f4 !important; font-weight: 600 !important;
position: relative; padding-left: 2rem !important; border-left: 4px solid #4484f4 !important;
&:hover { background-color: rgba(68, 132, 244, 0.12) !important; transform: translateX(4px) !important; }
}
}
}
/* Tabbed Secondary Navbar */
#secondary-nav {
position: fixed; top: 4rem; left: 0; right: 0; height: 52px; z-index: 1055 !important;
background: linear-gradient(rgba(0, 0, 0, 0.08), rgba(0, 0, 0, 0.08)), $primary;
box-shadow: inset 0 4px 12px rgba(0, 0, 0, 0.15); border-bottom: 1px solid rgba(0, 0, 0, 0.1);
display: flex; align-items: center; position: relative !important; top: 0 !important;
width: 100%; max-width: 100vw; box-sizing: border-box;
.container-fluid {
display: flex; align-items: center; width: 100%; height: 100%; overflow-x: auto !important; flex-wrap: nowrap !important;
scrollbar-width: none; -ms-overflow-style: none; -webkit-overflow-scrolling: touch;
&::-webkit-scrollbar { display: none; }
}
.sec-nav-list {
display: flex; margin: 0; padding: 0 1.5rem; list-style: none; gap: 28px;
align-items: center; height: 100%; width: auto; flex-shrink: 0 !important; flex-wrap: nowrap; overflow: visible !important;
}
.sec-nav-icons { display: flex; gap: 1.5rem; list-style: none; margin: 0 0 0 auto; padding: 0 1.5rem 0 2rem; align-items: center; flex-shrink: 0 !important; }
li { height: 100%; display: flex; align-items: center; }
a {
color: rgba(255, 255, 255, 0.7) !important; font-weight: 500; text-decoration: none;
font-size: 14.5px; letter-spacing: 0.2px; height: 100%; display: flex; align-items: center;
padding: 0 2px; border-bottom: 2px solid transparent; margin-bottom: 0; transition: color 0.15s ease, border-color 0.15s ease; white-space: nowrap;
&:hover { color: #ffffff !important; }
&.active { color: #ffffff !important; font-weight: 600; border-bottom: 2px solid #ffffff; }
}
}
/* Dark Mode Overrides */
html[data-bs-theme="dark"], body.dark {
.td-navbar .navbar-nav {
li.td-light-dark-menu > button,
li.nav-item:has(a[href*="github"]) > a,
li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a {
background-color: transparent !important; box-shadow: none !important; color: #e8eaed !important;
i, svg { fill: #e8eaed !important; }
&:hover {
background-color: #303134 !important; color: #f8f9fa !important; border-color: #8ab4f8 !important;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.3), 0 0 0 2px rgba(138, 180, 248, 0.15) !important;
i, svg { fill: #f8f9fa !important; }
}
}
}
.td-navbar .dropdown-menu {
background-color: #202124 !important; border-color: rgba(255, 255, 255, 0.12) !important;
box-shadow: 0 12px 40px rgba(0, 0, 0, 0.6) !important; scrollbar-color: rgba(255, 255, 255, 0.2) rgba(255, 255, 255, 0.05);
}
.td-navbar .dropdown-item {
color: #e8eaed !important;
&:hover, &:focus { background-color: rgba(138, 180, 248, 0.12) !important; color: #8ab4f8 !important; }
&.active-version, &.active {
background-color: rgba(138, 180, 248, 0.12) !important; color: #8ab4f8 !important; border-left-color: #8ab4f8 !important;
&::before { color: #8ab4f8; }
&:hover { background-color: rgba(138, 180, 248, 0.18) !important; }
}
}
#secondary-nav {
background: linear-gradient(rgba(0, 0, 0, 0.15), rgba(0, 0, 0, 0.15)), $primary; box-shadow: inset 0 4px 15px rgba(0, 0, 0, 0.3); border-bottom: 1px solid rgba(255, 255, 255, 0.05);
a { color: rgba(255, 255, 255, 0.7) !important; &:hover { color: #ffffff !important; } &.active { color: #ffffff !important; border-bottom-color: #ffffff; } }
}
}
/* Desktop Adjustments */
@media (min-width: 992px) {
.td-navbar .custom-pagefind-wrapper {
position: relative !important; left: 50% !important; top: 50% !important; transform: translate(-50%, -50%) !important;
width: 100% !important; max-width: 500px !important; margin: 0 !important; z-index: 1060 !important;
transition: opacity 0.15s ease-out, transform 0.15s ease-out, box-shadow 0.15s ease-out !important;
}
.td-navbar .pagefind-ui__drawer { left: 0 !important; right: 0 !important; width: 100% !important; max-width: 100% !important; }
}
/* Mobile Layout & Scaling */
@media (max-width: 991.98px) {
header, .td-navbar { position: relative !important; }
.td-navbar { height: auto !important; padding-bottom: 0.5rem !important; flex-wrap: nowrap !important; justify-content: space-between !important; min-height: 4rem !important; }
header .navbar-toggler { display: inline-flex !important; z-index: 1080 !important; margin-left: auto !important; }
.td-navbar .container-fluid { display: flex !important; flex-wrap: wrap !important; justify-content: space-between !important; align-items: center !important; }
.td-navbar .navbar-nav, .td-navbar .navbar-toggler { flex-direction: row !important; order: 2 !important; width: auto !important; margin-left: auto !important; }
.td-navbar .custom-pagefind-wrapper { display: none !important; }
.td-navbar .dropdown-menu {
background-color: #ffffff !important; box-shadow: 0 10px 30px rgba(0, 0, 0, 0.15) !important; border: 1px solid rgba(0, 0, 0, 0.08) !important;
min-width: 180px !important; padding: 0.5rem !important; position: absolute !important; z-index: 1065 !important;
}
.td-navbar .navbar-nav .dropdown-menu { right: 0 !important; left: auto !important; margin-top: 0.5rem !important; }
.td-navbar .dropdown-item { padding: 0.6rem 1rem !important; font-size: 0.95rem !important; }
#secondary-nav { position: relative !important; top: 0 !important; margin-top: 0 !important; height: auto !important; min-height: 52px; padding: 10px 0 !important; z-index: 1050 !important; clear: both !important; }
#secondary-nav .container-fluid { padding: 0 15px !important; }
#secondary-nav .sec-nav-list { padding: 0 !important; gap: 12px !important; }
#secondary-nav .sec-nav-icons { margin-left: 20px !important; padding: 0 !important; }
#secondary-nav li { height: auto !important; }
#secondary-nav a { height: 34px !important; padding: 0 12px !important; background: rgba(0, 0, 0, 0.1); border-radius: 20px; border-bottom: none !important; }
#secondary-nav a.active { background: #ffffff !important; color: $primary !important; }
}
@media (max-width: 767.98px) {
.td-navbar .navbar-nav { gap: 0.25rem !important; }
.td-navbar .navbar-nav li.nav-item:has(a[href*="github"], a[href*="Releases"]) span { display: none !important; }
.td-navbar .navbar-nav li.nav-item:has(a[href*="github"]) > a,
.td-navbar .navbar-nav li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a { padding: 0.25rem 0.6rem !important; font-size: 0.85rem !important; }
.td-navbar .navbar-nav li.nav-item:has(a[href*="github"]) > a i,
.td-navbar .navbar-nav li.nav-item:has(a[href*="github"]) > a svg,
.td-navbar .navbar-nav li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a i,
.td-navbar .navbar-nav li.nav-item:has(a[href*="Releases"], .dropdown-toggle):not(.td-light-dark-menu) > a svg { font-size: 1rem !important; margin-right: 0 !important; }
.td-navbar .navbar-nav a.dropdown-toggle::after { display: none !important; }
.td-navbar .navbar-nav li.td-light-dark-menu > button { width: 32px !important; height: 32px !important; }
.td-navbar .navbar-nav li.td-light-dark-menu > button i,
.td-navbar .navbar-nav li.td-light-dark-menu > button svg { width: 1rem !important; height: 1rem !important; }
#secondary-nav a { font-size: 13px !important; padding: 0 10px !important; height: 30px !important; }
#secondary-nav .sec-nav-list { gap: 8px !important; }
}

View File

@@ -1,27 +0,0 @@
/* ==========================================================================
LAYOUT ALIGNMENT & STACKING CONTEXTS
Manages global spacing and z-index hierarchy.
========================================================================== */
/* Global Reset */
body {
padding-top: 0 !important;
}
/* Zero-Spacing Elements */
.td-main, .td-outer, .td-sidebar {
margin-top: 0 !important;
padding-top: 0 !important;
}
/* Elements Requiring Top Padding */
main[role="main"], #td-section-nav, .td-sidebar-toc {
margin-top: 0 !important;
padding-top: 1.5rem !important;
}
/* Z-Index Hierarchy */
.td-main, .td-content, .td-sidebar-toc, .td-page-meta, .td-toc {
position: relative;
z-index: 10 !important;
}

View File

@@ -1,245 +0,0 @@
/* ==========================================================================
PAGEFIND SEARCH STYLES & MODAL
========================================================================== */
:root {
--pagefind-ui-scale: 0.9;
--pagefind-ui-primary: #0d6efd;
}
@keyframes simpleSlideDown {
from { opacity: 0; transform: translateY(-10px); }
to { opacity: 1; transform: translateY(0); }
}
/* Base Modal Backdrop & Body State */
#global-search-backdrop {
position: fixed; top: 0; left: 0; width: 100vw; height: 100vh;
background-color: rgba(0, 0, 0, 0.7);
backdrop-filter: blur(3px); -webkit-backdrop-filter: blur(3px);
z-index: 2147483646 !important; /* Max value minus 1 */
opacity: 0; pointer-events: none; transition: none;
transition: opacity 0.1s ease-in-out;
&.active { opacity: 1; pointer-events: auto; }
}
body.global-search-active {
/* Explicitly target Docsy's layout structure to avoid blurring injected modals */
header, main, footer, .td-main, .td-outer, .td-sidebar, .td-sidebar-toc {
filter: blur(6px) !important;
opacity: 0.4 !important;
pointer-events: none !important;
transition: none !important;
}
}
/* Main Component Scoping */
body {
.custom-pagefind-wrapper {
position: relative;
z-index: 9999 !important;
/* Modal Pop-out State */
&.active-modal {
position: fixed !important;
top: 12vh !important;
left: 50% !important;
transform: translateX(-50%) !important;
width: 90vw !important;
max-width: 650px !important;
z-index: 2147483647 !important; /* Absolute max value possible */
margin: 0 !important;
padding: 0 !important;
.pagefind-ui__drawer {
position: absolute !important;
top: 100% !important;
left: 0 !important;
right: 0 !important;
width: 100% !important;
max-width: 100% !important;
max-height: 70vh !important;
z-index: 999999 !important;
animation: simpleSlideDown 0.2s ease-out forwards !important;
overflow-y: auto !important;
overscroll-behavior: contain !important;
scrollbar-width: thin;
scrollbar-color: rgba(0, 0, 0, 0.2) transparent;
box-shadow: none !important;
&::-webkit-scrollbar { width: 6px; }
&::-webkit-scrollbar-track { background: transparent; }
&::-webkit-scrollbar-thumb { background: rgba(0, 0, 0, 0.2); border-radius: 10px; }
}
}
}
/* Pagefind Internal Element Overrides */
.pagefind-ui__form {
--pagefind-ui-text: #ffffff !important;
position: relative !important;
box-sizing: border-box !important;
&:focus-within { --pagefind-ui-text: #4484f4 !important; }
&::before {
top: 50% !important; transform: translateY(-50%) !important; margin-top: 0 !important;
width: 18px !important; height: 18px !important; left: 14px !important;
}
}
.pagefind-ui__search-input {
background-color: rgba(0, 0, 0, 0.1) !important;
border: 1.5px solid transparent !important;
box-shadow: inset 0 2px 6px rgba(0, 0, 0, 0.15) !important;
color: #ffffff !important; border-radius: 50px !important; font-weight: 400 !important;
transition: all 0.2s ease-in-out !important; height: 38px !important; font-size: 0.95rem !important;
padding-left: 40px !important; padding-right: 36px !important; box-sizing: border-box !important;
&::placeholder { color: #ffffff !important; opacity: 0.9 !important; }
&:focus {
background-color: #ffffff !important; border-color: #4484f4 !important; color: #212529 !important; outline: 0 !important;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.1), 0 0 0 4px rgba(68, 132, 244, 0.15) !important;
&::placeholder { color: #6c757d !important; opacity: 1 !important; }
}
}
.pagefind-ui__search-clear {
position: absolute !important; top: 50% !important; transform: translateY(-50%) !important; right: 12px !important;
width: 24px !important; height: 24px !important; min-height: 24px !important; padding: 0 !important; margin: 0 !important;
background: transparent !important; border: none !important; outline: none !important; box-shadow: none !important;
font-size: 0 !important; color: transparent !important; display: flex !important; align-items: center !important;
justify-content: center !important; overflow: hidden !important; cursor: pointer !important; z-index: 99 !important;
&::after {
content: "\00d7" !important; font-size: 22px !important; font-weight: 300 !important;
line-height: 1 !important; color: var(--pagefind-ui-text) !important; display: block !important;
}
}
.pagefind-ui__drawer {
position: absolute !important; top: 100% !important; width: 500px !important;
max-width: calc(100vw - 2rem) !important; margin-top: 12px !important; z-index: 99999 !important;
background-color: #ffffff !important; border: 1px solid #dee2e6 !important; border-radius: 12px !important;
padding: 1.5rem !important;
max-height: 80vh !important; overflow-y: auto !important; overscroll-behavior: contain !important;
box-sizing: border-box !important;
overflow-y: auto !important;
overflow-x: hidden !important;
overflow-wrap: break-word !important;
word-wrap: break-word !important;
word-break: break-word !important;
white-space: normal !important;
mark {
background-color: rgba(68, 132, 244, 0.15) !important; color: #1a73e8 !important; padding: 0 3px !important;
border-radius: 3px !important; font-weight: 600 !important; box-shadow: none !important;
}
}
.pagefind-ui__result-title, .pagefind-ui__result-excerpt, .pagefind-ui__message {
color: #212529 !important;
}
.pagefind-ui__button { margin-top: 1rem !important; }
.pagefind-ui__hidden { display: none !important; }
}
/* =======================================================
RESPONSIVE LAYOUTS
======================================================= */
body {
// DESKTOP SEARCH BAR
@media (min-width: 992px) {
.td-navbar .custom-pagefind-wrapper:not(.active-modal) {
position: absolute !important;
left: 50% !important;
top: 50% !important;
transform: translate(-50%, -50%) !important;
width: 100% !important;
max-width: 500px !important;
margin: 0 !important;
z-index: 1060 !important;
}
.td-navbar .pagefind-ui__drawer { left: 0 !important; right: 0 !important; width: 100% !important; max-width: 100% !important; }
.td-sidebar .td-search, .td-sidebar .custom-pagefind-wrapper { display: none !important; }
}
// MOBILE NATIVE STACK
@media (max-width: 991.98px) {
.td-navbar .custom-pagefind-wrapper, .td-navbar .container-fluid > div.d-none.d-lg-block { display: none !important; }
.td-sidebar .td-search, .td-sidebar .custom-pagefind-wrapper:not(.active-modal) {
display: block !important; width: 100% !important; max-width: 100% !important; margin-top: 1rem !important; margin-bottom: 1rem !important;
}
}
// LEFT SIDEBAR TOGGLE ALIGNMENT & MOBILE MODAL RESIZING
@media (max-width: 767.98px) {
.td-sidebar__search {
display: flex !important; flex-direction: row !important; align-items: center !important; justify-content: space-between !important;
width: 100% !important; margin-top: 60px !important; margin-bottom: 1rem !important; padding: 0 15px !important; z-index: 10 !important;
.custom-pagefind-wrapper:not(.active-modal) { width: 75% !important; flex: 0 0 75% !important; margin: 0 !important; }
}
.custom-pagefind-wrapper.active-modal {
top: 15px !important;
width: calc(100vw - 30px) !important;
.pagefind-ui__drawer {
max-height: calc(100dvh - 100px) !important;
}
}
.td-sidebar__toggle {
display: flex !important; align-items: center !important; justify-content: flex-end !important; width: 20% !important;
height: auto !important; background-color: transparent !important; border: none !important; box-shadow: none !important;
color: inherit !important; font-size: 1rem !important; padding: 0 !important;
&::after { content: none !important; }
}
}
}
/* =======================================================
DARK MODE STYLING
======================================================= */
html[data-bs-theme="dark"], body.td-dark {
#global-search-backdrop { background-color: rgba(0, 0, 0, 0.8); }
.pagefind-ui__form { --pagefind-ui-text: #ffffff !important; &:focus-within { --pagefind-ui-text: #8ab4f8 !important; } }
.pagefind-ui__search-input {
background-color: rgba(0, 0, 0, 0.2) !important; border-color: transparent !important;
box-shadow: inset 0 2px 6px rgba(0, 0, 0, 0.3) !important; color: #ffffff !important;
&::placeholder { color: #ffffff !important; opacity: 0.9 !important; }
&:focus {
background-color: #303134 !important; border-color: #8ab4f8 !important; color: #f8f9fa !important;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.3), 0 0 0 4px rgba(138, 180, 248, 0.15) !important;
&::placeholder { color: #e8eaed !important; opacity: 1 !important; }
}
}
.pagefind-ui__drawer {
background-color: #202124 !important; border-color: #3c4043 !important;
box-shadow: none !important;
mark { background-color: rgba(138, 180, 248, 0.2) !important; color: #8ab4f8 !important; }
}
.pagefind-ui__result-title, .pagefind-ui__result-excerpt, .pagefind-ui__message { color: #e8eaed !important; }
.pagefind-ui__button { color: #e8eaed !important; border-color: #5f6368 !important; background-color: #303134 !important; }
// Mobile sidebar toggle dark mode
@media (max-width: 767.98px) {
.td-sidebar__toggle { color: #e8eaed !important; background-color: transparent !important; border: none !important; &:hover { background-color: transparent !important; opacity: 0.8 !important; } }
}
}

View File

@@ -1,43 +0,0 @@
/* ==========================================================================
Secondary Navigation Header Styles
========================================================================== */
/* Base Layout */
.sec-nav-container { display: flex; align-items: center; height: 100%; width: 100%; overflow-x: auto; scrollbar-width: none; -ms-overflow-style: none; }
.sec-nav-container::-webkit-scrollbar { display: none; }
.sec-nav-list { display: flex; align-items: center; list-style: none; padding: 0; margin: 0; gap: 1.25rem; flex-shrink: 0; }
.sec-nav-icons { display: flex; gap: 1.5rem; list-style: none; margin: 0 0 0 auto; padding: 0 1rem 0 2rem; align-items: center; flex-shrink: 0; }
/* Typography & Links */
.sec-nav-item-left { display: flex; align-items: center; gap: 0.4rem; font-size: 0.95rem; font-weight: 500; color: rgba(255, 255, 255, 0.75) !important; text-decoration: none; transition: all 0.2s; white-space: nowrap; height: 100%; border-bottom: 2px solid transparent; padding-bottom: 2px; }
.sec-nav-item-right { display: flex; align-items: center; gap: 0.5rem; font-size: 0.95rem; font-weight: 700; color: rgba(255, 255, 255, 0.85) !important; text-decoration: none; transition: all 0.2s; white-space: nowrap; }
/* Active States */
.sec-nav-list a:hover, .sec-nav-list a.active { color: #ffffff !important; opacity: 1; }
.sec-nav-list a.active { font-weight: 600; border-bottom: 2px solid #ffffff !important; }
.sec-nav-icons a:hover { color: #ffffff !important; opacity: 1; }
/* Icons */
.pop-icon { display: inline-flex; align-items: center; justify-content: center; width: 28px; height: 28px; border-radius: 8px; font-size: 1rem; transition: opacity 0.2s ease; }
.pop-icon.fa-discord { background: #5865F2 !important; color: #ffffff !important; box-shadow: 0 4px 10px rgba(88, 101, 242, 0.35) !important; }
.pop-icon.fa-medium { background: #000000 !important; color: #ffffff !important; box-shadow: 0 4px 10px rgba(0, 0, 0, 0.25) !important; }
.sec-nav-icons a:hover .pop-icon { opacity: 0.85 !important; }
/* Dark Mode Overrides */
html[data-bs-theme="dark"] .sec-nav-item-left { color: rgba(255, 255, 255, 0.75) !important; }
html[data-bs-theme="dark"] .sec-nav-item-right { color: rgba(255, 255, 255, 0.85) !important; }
html[data-bs-theme="dark"] .sec-nav-list a.active { color: #ffffff !important; background-color: transparent !important; border-bottom: 2px solid #ffffff !important; font-weight: 600 !important; }
html[data-bs-theme="dark"] .pop-icon.fa-discord { background: #5865F2 !important; color: #ffffff !important; box-shadow: 0 4px 10px rgba(88, 101, 242, 0.35) !important; }
html[data-bs-theme="dark"] .pop-icon.fa-medium { background: #f8f9fa !important; color: #202124 !important; box-shadow: 0 4px 10px rgba(255, 255, 255, 0.15) !important; }
html[data-bs-theme="dark"] .dropdown-item.active-version { color: #8ab4f8 !important; background-color: rgba(138, 180, 248, 0.15) !important; border-left-color: #8ab4f8 !important; }
/* Responsive Layout */
@media (max-width: 991.98px) {
.sec-nav-icons { margin-left: 0; padding-left: 1rem; }
.sec-nav-list { gap: 1rem; }
.sec-nav-item-left, .sec-nav-item-right { font-size: 0.9rem; }
html[data-bs-theme="dark"] #secondary-nav .sec-nav-list a.active,
body.dark #secondary-nav .sec-nav-list a.active {
color: #4484f4 !important; background-color: #ffffff !important; border-bottom: none !important;
}
}

View File

@@ -1,164 +0,0 @@
/* ==========================================================================
SIDEBARS (LEFT NAVIGATION & RIGHT TOC)
Handles positioning, tree navigation, and page metadata.
========================================================================== */
/* =================================================================
LAYOUT & SCROLLING
================================================================= */
@media (min-width: 768px) {
.td-sidebar, .td-sidebar-toc {
position: sticky !important;
top: calc(4rem + 52px) !important;
height: calc(100vh - 4rem - 52px) !important;
overflow-y: auto !important;
padding-bottom: 2rem !important;
scrollbar-width: none; -ms-overflow-style: none;
&::-webkit-scrollbar { display: none; }
}
.td-sidebar-toc .td-toc, .td-sidebar-toc .td-page-meta {
position: static !important; top: auto !important;
height: auto !important; overflow-y: visible !important;
}
}
/* =================================================================
LEFT SIDEBAR NAVIGATION
================================================================= */
.td-sidebar__search {
display: none !important; padding: 0 !important; margin: 0 !important;
height: 0 !important; border: none !important;
}
.td-sidebar-nav, #td-section-nav { margin-top: 1.5rem !important; padding-top: 0 !important; }
#td-section-nav {
.ul-1 > li:not(.active-path), > .ul-0 > li > a.tree-root { display: none !important; }
> .ul-0 > li > ul.ul-1 { margin-top: 0 !important; padding-top: 0 !important; padding-left: 0 !important; margin-left: -1rem !important; }
.ul-1 > li.active-path {
> a, > label, > input { display: none !important; }
> ul { padding-left: 0 !important; margin-top: 0 !important; margin-left: -1rem !important; }
> ul > li > a, > ul > li > label > a { padding-left: 0 !important; }
}
}
.td-sidebar .form-control {
font-family: var(--bs-font-sans-serif) !important; border: 1px solid transparent;
background-color: #f1f3f4; border-radius: 8px; padding: 0.5rem 1rem;
transition: all 0.2s ease; font-size: 0.9rem;
&:focus { background-color: #ffffff; border-color: #4484f4; box-shadow: 0 0 0 3px rgba(68, 132, 244, 0.2); }
}
.td-sidebar-nav {
li > label, li > a {
font-family: var(--bs-font-sans-serif) !important; font-size: 0.95rem !important; font-weight: 400 !important;
display: flex !important; align-items: center; justify-content: space-between; width: 100%;
padding: 0.45rem 0.75rem !important; margin-bottom: 2px; border-radius: 6px; color: #5f6368 !important;
transition: all 0.15s ease-in-out; cursor: pointer;
&:hover { background-color: #f1f3f4; color: #202124 !important; text-decoration: none; }
&.active { color: #4484f4 !important; background-color: transparent !important; font-weight: 600 !important; }
}
li.active-path > label, li.active-path > a:not(.active) {
color: #4484f4 !important; background-color: transparent !important; font-weight: 600 !important;
}
li > label > a { padding: 0 !important; margin: 0 !important; color: inherit !important; flex-grow: 1; }
.with-child > label::after { margin-left: 0.5rem; }
}
/* Left Sidebar Dark Mode */
html[data-bs-theme="dark"], body.dark {
.td-sidebar-nav li > label, .td-sidebar-nav li > a {
color: #9aa0a6 !important;
&:hover { background-color: rgba(255, 255, 255, 0.05); color: #e8eaed !important; }
&.active { color: #8ab4f8 !important; }
}
.td-sidebar-nav li.active-path > label, .td-sidebar-nav li.active-path > a:not(.active) { color: #8ab4f8 !important; }
.td-sidebar .form-control {
background-color: #303134; color: #e8eaed;
&:focus { background-color: #202124; border-color: #8ab4f8; box-shadow: 0 0 0 3px rgba(138, 180, 248, 0.2); }
}
}
/* =================================================================
INTEGRATIONS SIDEBAR LOCKS (Tools & Samples Roots Only)
================================================================= */
/* LOCK ALL sections inside the integrations directory */
a.td-sidebar-link__section[href*="/integrations/"] {
pointer-events: none !important;
cursor: default !important;
}
/* UNLOCK the top-level "/integrations/" root folder itself so it can be toggled */
a.td-sidebar-link__section[href$="/integrations/"] {
pointer-events: auto !important;
cursor: pointer !important;
}
/* UNLOCK any nested sub-folders inside tools or samples (if they exist) */
a.td-sidebar-link__section[href*="/integrations/"][href*="/tools/"],
a.td-sidebar-link__section[href*="/integrations/"][href*="/samples/"] {
pointer-events: auto !important;
cursor: pointer !important;
}
/* RE-LOCK exactly the "tools/" and "samples/" parent folders */
a.td-sidebar-link__section[href*="/integrations/"][href$="/tools/"],
a.td-sidebar-link__section[href*="/integrations/"][href$="/samples/"] {
pointer-events: none !important;
cursor: default !important;
}
/* =================================================================
RIGHT SIDEBAR (TABLE OF CONTENTS)
================================================================= */
.td-sidebar-toc, .td-page-meta {
&, a, li, span, #TableOfContents {
font-family: var(--bs-font-sans-serif) !important; font-weight: 400 !important; letter-spacing: -0.01em;
}
a {
font-size: 0.95rem !important; color: #5f6368 !important; text-decoration: none !important;
display: flex !important; align-items: center; padding: 0.45rem 0.75rem !important;
margin-bottom: 2px; border-radius: 6px; transition: all 0.15s ease-in-out;
i, svg { color: inherit !important; fill: currentColor !important; margin-right: 8px; }
&:hover { background-color: #f1f3f4; color: #202124 !important; }
&.active { color: #4484f4 !important; background-color: transparent !important; font-weight: 600 !important; }
}
}
.td-sidebar-toc #TableOfContents {
li a.active { font-weight: 600 !important; }
ul { padding-left: 0; list-style: none; margin: 0; }
ul ul { padding-left: 1rem; margin-left: 0.75rem; border-left: 1px solid rgba(0,0,0,0.05); }
}
.td-page-meta {
margin-bottom: 1.5rem !important; padding-bottom: 1rem; border-bottom: 1px solid rgba(0,0,0,0.05);
}
/* Right Sidebar Dark Mode */
html[data-bs-theme="dark"], body.dark {
.td-sidebar-toc a, .td-page-meta a {
color: #9aa0a6 !important;
&:hover { background-color: rgba(255, 255, 255, 0.05); color: #e8eaed !important; }
&.active { color: #8ab4f8 !important; }
}
.td-page-meta { border-bottom-color: rgba(255,255,255,0.1); }
.td-sidebar-toc #TableOfContents ul ul { border-left-color: rgba(255,255,255,0.1); }
}
/* =================================================================
HIDE ELEMENTS (Tags & Meta)
================================================================= */
.td-toc, .td-page-meta {
.taxonomy, .td-tags, [class*="taxonomy"], h5.taxonomy-tree-header, ul.taxonomy-terms {
display: none !important;
}
}

View File

@@ -1,37 +0,0 @@
/* ==========================================================================
GLOBAL TYPOGRAPHY & SCALING
Defines core fonts, root variables, and base element typography.
========================================================================== */
@import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;500&family=Readex+Pro:wght@400;500;600;700&display=swap');
html {
font-size: 16px !important;
scroll-behavior: smooth;
}
:root {
--bs-font-sans-serif: 'Readex Pro', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif !important;
--bs-body-font-family: var(--bs-font-sans-serif) !important;
--bs-font-monospace: 'JetBrains Mono', SFMono-Regular, Menlo, Monaco, Consolas, monospace !important;
--header-offset: 50px;
}
body {
font-family: var(--bs-font-sans-serif) !important;
line-height: 1.6;
-webkit-font-smoothing: antialiased;
}
.td-main, .td-sidebar, .navbar, .td-content {
font-family: var(--bs-font-sans-serif) !important;
}
code, pre, kbd, samp {
font-family: var(--bs-font-monospace) !important;
font-size: 0.9em;
}
h1[id], h2[id], h3[id], h4[id], h5[id], h6[id] {
scroll-margin-top: calc(var(--header-offset) + 10px) !important;
}

View File

@@ -1,176 +0,0 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
title = 'MCP Toolbox for Databases'
relativeURLs = false
languageCode = 'en-us'
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = false
enableGitInfo = true
enableRobotsTXT = true
ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quickstart/go"]
[languages]
[languages.en]
languageName ="English"
weight = 1
[module]
proxy = "direct"
[module.hugoVersion]
extended = true
min = "0.146.0"
[[module.mounts]]
source = "../docs/en"
target = 'content'
[[module.imports]]
path = "github.com/google/docsy"
disable = false
[[module.imports]]
path = "github.com/martignoni/hugo-notice"
[params]
description = "MCP Toolbox for Databases is an open source MCP server for databases. It enables you to develop tools easier, faster, and more securely by handling the complexities such as connection pooling, authentication, and more."
copyright = "Google LLC"
github_repo = "https://github.com/googleapis/genai-toolbox"
github_project_repo = "https://github.com/googleapis/genai-toolbox"
github_subdir = "docs"
offlineSearch = false
version_menu = "Releases"
disableMigrationBanner = true
releases_url = "/releases.releases"
global_logo_url = "/"
pagefind = true
version = "dev"
[params.ui]
ul_show = 100
showLightDarkModeMenu = true
breadcrumb_disable = false
sidebar_menu_foldable = true
sidebar_menu_compact = false
[params.ui.feedback]
enable = true
yes = 'Glad to hear it! Please <a href="https://github.com/googleapis/genai-toolbox/issues/new">tell us how we can improve</a>.'
no = 'Sorry to hear that. Please <a href="https://github.com/googleapis/genai-toolbox/issues/new">tell us how we can improve</a>.'
[params.ui.readingtime]
enable = true
[[params.versions]]
version = "dev"
url = "https://mcp-toolbox.dev/dev/"
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown
[[params.versions]]
version = "v0.31.0"
url = "https://mcp-toolbox.dev/v0.31.0/"
[[params.versions]]
version = "v0.30.0"
url = "https://mcp-toolbox.dev/v0.30.0/"
[[params.versions]]
version = "v0.29.0"
url = "https://mcp-toolbox.dev/v0.29.0/"
[[params.versions]]
version = "v0.28.0"
url = "https://mcp-toolbox.dev/v0.28.0/"
[[params.versions]]
version = "v0.27.0"
url = "https://mcp-toolbox.dev/v0.27.0/"
[[params.versions]]
version = "v0.26.0"
url = "https://mcp-toolbox.dev/v0.26.0/"
[[params.versions]]
version = "v0.25.0"
url = "https://mcp-toolbox.dev/v0.25.0/"
[[params.versions]]
version = "v0.24.0"
url = "https://mcp-toolbox.dev/v0.24.0/"
[[params.versions]]
version = "v0.23.0"
url = "https://mcp-toolbox.dev/v0.23.0/"
[[params.versions]]
version = "v0.22.0"
url = "https://mcp-toolbox.dev/v0.22.0/"
[[params.versions]]
version = "v0.21.0"
url = "https://mcp-toolbox.dev/v0.21.0/"
[[params.versions]]
version = "v0.20.0"
url = "https://mcp-toolbox.dev/v0.20.0/"
[[params.versions]]
version = "v0.19.1"
url = "https://mcp-toolbox.dev/v0.19.1/"
[[params.versions]]
version = "v0.18.0"
url = "https://mcp-toolbox.dev/v0.18.0/"
[[params.versions]]
version = "v0.17.0"
url = "https://mcp-toolbox.dev/v0.17.0/"
[[menu.main]]
name = "GitHub"
weight = 50
url = "https://github.com/googleapis/genai-toolbox"
pre = "<i class='fa-brands fa-github'></i>"
[markup.goldmark.renderer]
unsafe= true
[markup.highlight]
noClasses = false
style = "tango"
[outputFormats]
[outputFormats.LLMS]
mediaType = "text/plain"
baseName = "llms"
isPlainText = true
root = true
[outputFormats.LLMS-FULL]
mediaType = "text/plain"
baseName = "llms-full"
isPlainText = true
root = true
[outputFormats.releases]
baseName = 'releases'
isPlainText = true
mediaType = 'text/releases'
[mediaTypes."text/releases"]
suffixes = ["releases"]
[outputs]
home = ["HTML", "RSS", "LLMS", "LLMS-FULL", "releases"]
[services]
[services.googleAnalytics]
id = "G-GLSV9KD8BF"

View File

@@ -1,5 +1,5 @@
title = 'MCP Toolbox for Databases'
relativeURLs = false
relativeURLs = true
languageCode = 'en-us'
defaultContentLanguage = "en"
@@ -35,128 +35,13 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
github_repo = "https://github.com/googleapis/genai-toolbox"
github_project_repo = "https://github.com/googleapis/genai-toolbox"
github_subdir = "docs"
offlineSearch = false
version_menu = "Releases"
releases_url = "/genai-toolbox/releases.releases"
global_logo_url = "/genai-toolbox/"
version = "dev"
pagefind = true
offlineSearch = true
[params.ui]
ul_show = 100
showLightDarkModeMenu = true
breadcrumb_disable = false
breadcrumb_disable = true
sidebar_menu_foldable = true
sidebar_menu_compact = false
[params.ui.feedback]
enable = true
yes = 'Glad to hear it! Please <a href="https://github.com/googleapis/genai-toolbox/issues/new">tell us how we can improve</a>.'
no = 'Sorry to hear that. Please <a href="https://github.com/googleapis/genai-toolbox/issues/new">tell us how we can improve</a>.'
[params.ui.readingtime]
enable = true
[[params.versions]]
version = "dev"
url = "https://googleapis.github.io/genai-toolbox/dev/"
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown
[[params.versions]]
version = "v0.31.0"
url = "https://googleapis.github.io/genai-toolbox/v0.31.0/"
[[params.versions]]
version = "v0.30.0"
url = "https://googleapis.github.io/genai-toolbox/v0.30.0/"
[[params.versions]]
version = "v0.29.0"
url = "https://googleapis.github.io/genai-toolbox/v0.29.0/"
[[params.versions]]
version = "v0.28.0"
url = "https://googleapis.github.io/genai-toolbox/v0.28.0/"
[[params.versions]]
version = "v0.27.0"
url = "https://googleapis.github.io/genai-toolbox/v0.27.0/"
[[params.versions]]
version = "v0.26.0"
url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
[[params.versions]]
version = "v0.25.0"
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
[[params.versions]]
version = "v0.24.0"
url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
[[params.versions]]
version = "v0.23.0"
url = "https://googleapis.github.io/genai-toolbox/v0.23.0/"
[[params.versions]]
version = "v0.22.0"
url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"
[[params.versions]]
version = "v0.21.0"
url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"
[[params.versions]]
version = "v0.20.0"
url = "https://googleapis.github.io/genai-toolbox/v0.20.0/"
[[params.versions]]
version = "v0.19.1"
url = "https://googleapis.github.io/genai-toolbox/v0.19.1/"
[[params.versions]]
version = "v0.18.0"
url = "https://googleapis.github.io/genai-toolbox/v0.18.0/"
[[params.versions]]
version = "v0.17.0"
url = "https://googleapis.github.io/genai-toolbox/v0.17.0/"
[[params.versions]]
version = "v0.16.0"
url = "https://googleapis.github.io/genai-toolbox/v0.16.0/"
[[params.versions]]
version = "v0.15.0"
url = "https://googleapis.github.io/genai-toolbox/v0.15.0/"
[[params.versions]]
version = "v0.14.0"
url = "https://googleapis.github.io/genai-toolbox/v0.14.0/"
[[params.versions]]
version = "v0.13.0"
url = "https://googleapis.github.io/genai-toolbox/v0.13.0/"
[[params.versions]]
version = "v0.12.0"
url = "https://googleapis.github.io/genai-toolbox/v0.12.0/"
[[params.versions]]
version = "v0.11.0"
url = "https://googleapis.github.io/genai-toolbox/v0.11.0/"
[[params.versions]]
version = "v0.10.0"
url = "https://googleapis.github.io/genai-toolbox/v0.10.0/"
[[params.versions]]
version = "v0.9.0"
url = "https://googleapis.github.io/genai-toolbox/v0.9.0/"
[[params.versions]]
version = "v0.8.0"
url = "https://googleapis.github.io/genai-toolbox/v0.8.0/"
[[menu.main]]
name = "GitHub"
@@ -182,18 +67,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
baseName = "llms-full"
isPlainText = true
root = true
[outputFormats.releases]
baseName = 'releases'
isPlainText = true
mediaType = 'text/releases'
[mediaTypes."text/releases"]
suffixes = ["releases"]
[outputs]
home = ["HTML", "RSS", "LLMS", "LLMS-FULL", "releases"]
# Google Analytics ID
[services]
[services.googleAnalytics]
id = "G-GLSV9KD8BF"
home = ["HTML", "RSS", "LLMS", "LLMS-FULL"]

View File

@@ -1,11 +0,0 @@
<a class="dropdown-item" href="{{ .Site.Params.global_logo_url | default `/` }}">latest</a>
{{ if .Site.Params.versions -}}
{{ $path := "" -}}
{{ if .Site.Params.version_menu_pagelinks -}}
{{ $path = .Page.RelPermalink -}}
{{ end -}}
{{ range .Site.Params.versions -}}
<a class="dropdown-item" href="{{ .url }}{{ $path }}">{{ .version }}</a>
{{ end -}}
{{ end -}}

View File

@@ -1,11 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>{{ .Title }}</title>
<link rel="canonical" href="{{ .Params.external_url }}"/>
<meta http-equiv="refresh" content="0;url={{ .Params.external_url }}"/>
</head>
<body>
<p>Redirecting you to <a href="{{ .Params.external_url }}">{{ .Params.external_url }}</a>...</p>
</body>
</html>

View File

@@ -1,26 +0,0 @@
{{ define "main" }}
<div class="td-content">
<h1>{{ .Title }}</h1>
{{ with .Params.description }}<div class="lead">{{ . | markdownify }}</div>{{ end }}
<header class="article-meta">
{{ partial "taxonomy_terms_article_wrapper.html" . }}
{{ if (and (not .Params.hide_readingtime) (.Site.Params.ui.readingtime.enable)) }}
{{ partial "reading-time.html" . }}
{{ end }}
</header>
{{ .Content }}
{{ partial "section-index.html" . }}
{{ partial "pager.html" . }}
{{ if (and (not .Params.hide_feedback) (.Site.Params.ui.feedback.enable) (.Site.Config.Services.GoogleAnalytics.ID)) }}
<hr class="mt-5 mb-4">
<div class="mt-4">
{{ partial "feedback.html" . }}
<br />
{{ end }}
{{ if (.Site.Params.DisqusShortname) }}
<br />
{{ partial "disqus-comment.html" . }}
{{ end }}
{{ partial "page-meta-lastmod.html" . }}
</div>
{{ end }}

View File

@@ -1,26 +0,0 @@
{{ define "main" }}
<div class="td-content">
<h1>{{ .Title }}</h1>
{{ with .Params.description }}<div class="lead">{{ . | markdownify }}</div>{{ end }}
<header class="article-meta">
{{ partial "taxonomy_terms_article_wrapper.html" . }}
{{ if (and (not .Params.hide_readingtime) (.Site.Params.ui.readingtime.enable)) }}
{{ partial "reading-time.html" . }}
{{ end }}
</header>
{{ .Content }}
{{ partial "section-index.html" . }}
{{ partial "pager.html" . }}
{{ if (and (not .Params.hide_feedback) (.Site.Params.ui.feedback.enable) (.Site.Config.Services.GoogleAnalytics.ID)) }}
<hr class="mt-5 mb-4">
<div class="mt-4">
{{ partial "feedback.html" . }}
<br />
{{ end }}
{{ if (.Site.Params.DisqusShortname) }}
<br />
{{ partial "disqus-comment.html" . }}
{{ end }}
{{ partial "page-meta-lastmod.html" . }}
</div>
{{ end }}

View File

@@ -1,22 +1,14 @@
{{- define "print_full_node" }}
{{ .Site.Params.description }}
========================================================================
## {{ .page.Title }}
========================================================================
**Hierarchy:** {{ if .page.Parent }}{{ range .page.Ancestors.Reverse }}{{ .Title }} > {{ end }}{{ end }}{{ .page.Title }}
**URL:** {{ .page.Permalink }}
**Description:** {{ .page.Description | default "None" }}
{{ .page.RawContent }}
{{ if .page.Pages }}
{{- range .page.Pages.ByWeight }}
{{ template "print_full_node" (dict "page" .) }}
{{- end }}
{{- end }}
{{- end -}}
{{ partial "llms-header.html" (dict "Site" .Site "TitleSuffix" "Complete Documentation") }}
{{- range .Site.Sections.ByWeight }}
{{ template "print_full_node" (dict "page" .) }}
{{- end }}
{{ range .Site.Sections }}
# {{ .Title }}
{{ .Description }}
{{ range .Pages }}
# {{ .Title }}
{{ .Description }}
{{ .RawContent }}
{{ range .Pages }}
# {{ .Title }}
{{ .Description }}
{{ .RawContent }}
{{end }}{{ end }}{{ end }}

View File

@@ -1,17 +1,9 @@
{{- define "print_index_node" }}
{{ .indent }}- [{{ .page.Title }}]({{ .page.Permalink }}): {{ .page.Description | default "No description provided." }}
{{- $nextIndent := printf "%s " .indent }}
{{- if .page.Pages }}
{{- range .page.Pages.ByWeight }}
{{ template "print_index_node" (dict "page" . "indent" $nextIndent) }}
{{- end }}
{{- end }}
{{- end -}}
# {{ .Site.Title }}
{{ partial "llms-header.html" (dict "Site" .Site "TitleSuffix" "Documentation Index") }}
> {{ .Site.Params.description }}
## Documentation Map
## Docs
{{ range .Site.Sections }}
### {{ .Title }}
{{ range .Site.Sections.ByWeight -}}
{{ template "print_index_node" (dict "page" . "indent" "") }}
{{- end }}
{{ .Description }}{{ range .Pages }}- [{{ .Title }}]({{ .Permalink }}): {{ .Description }}{{ range .Pages }} - [{{ .Title }}]({{ .Permalink }}): {{ .Description }}{{end }}{{ end }}{{ end }}

View File

@@ -1,245 +0,0 @@
{{- $navLinks := slice -}}
{{- $targetSections := slice -}}
{{- /* Resolve root navigation section */ -}}
{{- $docsRoot := site.GetPage "/docs" -}}
{{- if $docsRoot -}}
{{- $targetSections = $docsRoot.Sections.ByWeight -}}
{{- else -}}
{{- $targetSections = site.Home.Sections.ByWeight -}}
{{- end -}}
{{- /* Determine active top-level directory */ -}}
{{- if .FirstSection -}}
{{- $topLevelFolder := .FirstSection.Section -}}
{{- if or (findRE `^v[0-9]+` $topLevelFolder) (eq $topLevelFolder "dev") -}}
{{- $targetSections = .FirstSection.Sections.ByWeight -}}
{{- end -}}
{{- end -}}
{{- /* Construct navigation link dataset */ -}}
{{- range $targetSections -}}
{{- if .Title -}}
{{- $manualLink := .Params.manualLink | default "" -}}
{{- $icon := .Params.icon | default "" -}}
{{- $isRightSide := ne $manualLink "" -}}
{{- $navLinks = $navLinks | append (dict "name" .Title "url" .RelPermalink "redirect" $manualLink "icon" $icon "isRight" $isRightSide) -}}
{{- end -}}
{{- end -}}
{{- /* Compile shared tools data for sidebar injection */ -}}
{{- $dynamicLinks := dict -}}
{{- range .Site.Pages -}}
{{- if .Params.shared_tools -}}
{{- $targetURL := .RelPermalink -}}
{{- $targetData := slice -}}
{{- range .Params.shared_tools -}}
{{- $sourceSection := site.GetPage .source -}}
{{- if $sourceSection -}}
{{- $links := slice -}}
{{- range $sourceSection.Pages -}}
{{- $links = $links | append (dict "title" .Title "url" .RelPermalink) -}}
{{- end -}}
{{- $targetData = $targetData | append (dict "header" .header "links" $links) -}}
{{- end -}}
{{- end -}}
{{- if gt (len $targetData) 0 -}}
{{- $dynamicLinks = merge $dynamicLinks (dict $targetURL $targetData) -}}
{{- end -}}
{{- end -}}
{{- end -}}
<div id="nav-data-container" data-nav="{{ $navLinks | jsonify }}" style="display: none;"></div>
<script>
window.dynamicSidebarLinks = {{ $dynamicLinks | jsonify | safeJS }};
</script>
<script>
document.addEventListener("DOMContentLoaded", function() {
// Add links to GitHub issues for documentation feedback and feature requests
const docIssueLink = document.querySelector('.td-page-meta__issue');
if (docIssueLink) {
const url = new URL(docIssueLink.href);
url.searchParams.set('template', 'feature_request.yml');
docIssueLink.href = url.toString();
}
const projIssueLink = document.querySelector('.td-page-meta__project-issue');
if (projIssueLink) {
const url = new URL(projIssueLink.href);
url.searchParams.set('template', 'bug_report.yml');
projIssueLink.href = url.toString();
}
const container = document.getElementById('nav-data-container');
const currentPath = window.location.pathname;
const header = document.querySelector('header');
if (!container || !header || document.getElementById('secondary-nav')) return;
let links = [];
try {
links = JSON.parse(container.dataset.nav);
} catch (e) {
console.error("Failed to parse nav data", e);
return;
}
// Configure brand logo routing
const logoLink = document.querySelector('.navbar-brand');
if (logoLink) {
logoLink.href = "{{ site.Params.global_logo_url | default `/` }}";
}
// Sidebar link centering logic
const sidebar = document.querySelector('#td-section-nav') || document.querySelector('.td-sidebar-nav') || document.querySelector('.td-sidebar');
if (sidebar) {
const activeLink = sidebar.querySelector('.active');
if (activeLink) {
const sidebarRect = sidebar.getBoundingClientRect();
const activeRect = activeLink.getBoundingClientRect();
sidebar.scrollTop = activeRect.top - sidebarRect.top + sidebar.scrollTop - (sidebar.clientHeight / 2) + (activeRect.height / 2);
}
}
// Build secondary navigation UI
let leftNavItems = '';
let rightNavItems = '';
const versionMatch = currentPath.match(/^\/(v\d+\.\d+\.\d+|dev)\//);
const activePrefix = versionMatch ? versionMatch[0] : '/';
links.forEach(link => {
if (!link.name || !link.url) return;
let rawUrl = link.isRight ? link.redirect : link.url;
// Only strip double slashes if it is an internal relative link
let finalUrl = rawUrl.startsWith('http') ? rawUrl : rawUrl.replace(/\/+/g, '/');
// Apply active version prefix mapping
if (!link.isRight && activePrefix !== '/' && !finalUrl.startsWith(activePrefix)) {
finalUrl = activePrefix + finalUrl.replace(/^\//, '');
}
const iconHtml = link.icon ? `<i class="${link.icon}${link.isRight ? ' pop-icon' : ''}" ${!link.isRight ? 'style="font-size: 1.05rem;"' : ''}></i> ` : '';
if (link.isRight) {
rightNavItems += `
<li>
<a href="${finalUrl}" target="_blank" rel="noopener noreferrer" class="sec-nav-item-right">
${iconHtml}<span class="sec-nav-text">${link.name}</span>
</a>
</li>`;
} else {
const isActive = currentPath.startsWith(finalUrl) ? ' active' : '';
leftNavItems += `
<li>
<a class="sec-nav-item-left${isActive}" href="${finalUrl}">
${iconHtml}<span>${link.name}</span>
</a>
</li>`;
}
});
header.insertAdjacentHTML('beforeend', `
<div id="secondary-nav">
<div class="container-fluid sec-nav-container">
<ul class="sec-nav-list">${leftNavItems}</ul>
<ul class="sec-nav-icons">${rightNavItems}</ul>
</div>
</div>
`);
// ======================================================================
// Configure release version button labeling & Active Highlight
// ======================================================================
try {
const relBtn = Array.from(document.querySelectorAll('.td-navbar a.dropdown-toggle'))
.find(el => el.textContent.includes('Releases') || el.textContent.match(/v\d+\./) || el.textContent.includes('dev'));
if (relBtn) {
let label = currentPath.includes('/dev/') ? "dev" : (versionMatch ? versionMatch[1].replace(/\//g, '') : "Releases");
// Update the text on the dropdown button itself
for (let n of relBtn.childNodes) {
if (n.nodeType === 3 && n.textContent.trim().length > 1) {
n.textContent = ' ' + label;
break;
}
}
if (label !== "Releases") {
const waitForLinks = setInterval(() => {
const dropdownItems = document.querySelectorAll('.dropdown-menu .dropdown-item');
if (dropdownItems.length > 0) {
clearInterval(waitForLinks);
dropdownItems.forEach(item => {
const href = item.getAttribute('href');
if (href && (href.includes(`/${label}/`) || href.endsWith(`/${label}`))) {
item.classList.add('active-version');
}
});
}
}, 50); // Check every 50 milliseconds
// Safety fallback: Stop checking after 3 seconds so it doesn't run forever
setTimeout(() => clearInterval(waitForLinks), 3000);
}
}
} catch (e) { console.warn("Releases Labeler skipped.", e); }
// Inject shared tools into sidebar DOM
const sidebarData = window.dynamicSidebarLinks;
if (sidebarData && Object.keys(sidebarData).length > 0) {
Object.keys(sidebarData).forEach(targetUrl => {
const targetLink = document.querySelector(`.td-sidebar-nav a[href="${targetUrl}"]`) ||
document.querySelector(`.td-sidebar-nav a[href="${targetUrl.replace(/\/$/, '')}"]`);
if (!targetLink) return;
const parentLi = targetLink.closest('li');
if (!parentLi) return;
let innerUl = parentLi.querySelector('ul');
if (!innerUl) {
innerUl = document.createElement('ul');
innerUl.className = "ul-2 list-unstyled pb-2 pl-3";
innerUl.style.marginLeft = "1rem";
parentLi.appendChild(innerUl);
parentLi.classList.add('with-child');
}
sidebarData[targetUrl].forEach(group => {
const groupLi = document.createElement('li');
groupLi.className = "with-child";
const isGroupActive = group.links.some(link => currentPath === link.url);
const subUl = document.createElement('ul');
subUl.className = "ul-3 list-unstyled pb-2 pl-3";
subUl.style.marginLeft = "1rem";
subUl.style.display = isGroupActive ? "block" : "none";
if (group.header) {
const headerLabel = document.createElement('label');
headerLabel.style.cursor = "pointer";
headerLabel.innerHTML = `<span style="font-weight: 600; text-transform: uppercase; font-size: 0.85rem; letter-spacing: 0.5px;">${group.header}</span>`;
headerLabel.addEventListener('click', function(e) {
e.preventDefault();
subUl.style.display = subUl.style.display === "none" ? "block" : "none";
});
groupLi.appendChild(headerLabel);
}
group.links.forEach(link => {
const linkLi = document.createElement('li');
linkLi.innerHTML = `<a href="${link.url}" class="td-sidebar-link${currentPath === link.url ? ' active' : ''}" title="${link.title}">${link.title}</a>`;
subUl.appendChild(linkLi);
});
groupLi.appendChild(subUl);
innerUl.appendChild(groupLi);
});
});
}
});
</script>

View File

@@ -1,4 +0,0 @@
<script src='{{ "js/w3.js" | relURL }}'></script>
{{ if not .Site.Params.disableMigrationBanner }}
<script src="{{ "js/custom-layout.js" | relURL }}"></script>
{{ end }}

View File

@@ -1,58 +0,0 @@
# {{ .Site.Title }} - {{ .TitleSuffix }}
> {{ .Site.Params.description }}
**DOCUMENTATION VERSION:** {{ .Site.Params.version | default "Latest" }}
**BASE URL:** {{ .Site.BaseURL }}
**GENERATED ON:** {{ now.Format "2006-01-02T15:04:05Z07:00" }}
---
### System Directives for AI Models
**Role:** You are an expert Developer Advocate and Integration Engineer for the **MCP (Model Context Protocol) Toolbox for Databases**.
**Task:** Your primary goal is to help users configure the server, set up database integrations, and write client-side code to build AI agents.
**Strict Guidelines:**
1. **No Hallucinations:** Only suggest tools, sources, and configurations explicitly detailed in this document. Do not invent arbitrary REST endpoints.
2. **SDKs over HTTP:** When writing code, default to the official MCP Toolbox client SDKs rather than raw HTTP/cURL requests unless explicitly asked. Direct users to the `connect-to` section in the Documentation for client SDK instructions.
3. **Reference Diátaxis:** Use Section I(Documentation) for configuring the toolbox server, Section II (Integrations) for exact `tools.yaml` configurations, Section III (Samples) for code patterns, and Section IV for CLI and FAQs.
### Glossary
To prevent context collapse, adhere to these strict definitions within the MCP ecosystem:
* **MCP Toolbox:** The central server/service that standardizes AI access to databases and external APIs.
* **Source:** A configured backend connection to an external system (e.g., PostgreSQL, BigQuery, HTTP).
* **Tool:** A single, atomic capability exposed to the LLM (e.g., `bigquery-sql-query`), executed against a Source.
* **Toolset:** A logical, grouped collection of Tools.
* **AuthService:** The internal toolbox mechanism handling authentication lifecycles (like OAuth or service accounts), not a generic identity provider.
* **Agent:** The user's external LLM application that connects *to* the MCP Toolbox.
### Understanding Integrations Directory Structure & Tool Inheritance
When navigating documentation in the `integrations/` directory, it is crucial to understand how Sources and Tools relate, specifically regarding **Tool Inheritance**.
* **Source Pages (`source.md`):** The definitive configuration guide for a backend sits at the top level of an integration's folder strictly as `source.md` (e.g., `integrations/alloydb/source.md`). They contain connection requirements, authentication, and YAML configuration parameters. *(Note: `_index.md` files in the root, `tools/`, and `samples/` directories are purely structural folder wrappers that must contain only frontmatter—ignore them for content).*
* **Native Tools:** Specific capabilities built directly for a Source. If a Source has native tools, they are located in a `tools/` sub-directory (e.g., `integrations/alloydb/tools/alloydb-sql.md`).
* **Inherited Tools (Shared Tools):** Managed or compatible databases (e.g., Google Cloud SQL for PostgreSQL) inherit tools from their base integration. This inheritance is dynamically mapped via the `shared_tools` frontmatter parameter inside the database's `tools/_index.md` file (which contains no body content). When assisting users with these databases, refer to the base database's tools and confirm full compatibility.
### Global Environment & Prerequisites
* **Configuration:** `tools.yaml` is the ultimate source of truth for server configuration.
* **Database:** PostgreSQL 16+ and the `psql` client.
* **Language Requirements:**
{{- with .Site.GetPage "/documentation/getting-started/local_quickstart" }}
{{- $match := findRE "(?i)Python\\s+\\(\\d+\\.\\d+\\s+or\\s+higher\\)" .RawContent 1 }}
* Python: {{ if $match }}{{ index $match 0 | replaceRE "[\\[\\]]" "" }}{{ else }}Refer to Python Quickstart{{ end }}
{{- end }}
{{- with .Site.GetPage "/documentation/getting-started/local_quickstart_js" }}
{{- $match := findRE "(?i)Node\\.js \\(v\\d+ or higher\\)" .RawContent 1 }}
* JavaScript/Node: {{ if $match }}{{ index $match 0 }}{{ else }}Refer to JS Quickstart{{ end }}
{{- end }}
{{- with .Site.GetPage "/documentation/getting-started/local_quickstart_go" }}
{{- $match := findRE "(?i)Go \\(v\\d+\\.\\d+\\.\\d+ or higher\\)" .RawContent 1 }}
* Go: {{ if $match }}{{ index $match 0 }}{{ else }}Refer to Go Quickstart{{ end }}
{{- end }}
### The Diátaxis Narrative Framework
This documentation is structured following the Diátaxis framework to assist in clear navigation and understanding:
* **Section I: Documentation:** (Explanation) Theoretical context, high-level understanding, and universal How-To Guides.
* **Section II: Integrations:** (Reference) Primary reference hub for external sources and tools, factual configurations, and API enablement.
* **Section III: Samples:** (Tutorials) Code patterns and examples. **Note for AI:** Sample code is distributed across three main areas:
1. **Quickstarts:** Located in `documentation/getting-started/`.
2. **Integration-Specific Samples:** Nested within their respective `integrations/<source_name>/samples/` directories.
3. **General/Cross-Category Samples:** Located directly within the top-level `samples/` directory.
* **Section IV: Reference:** (Information) Strict, accurate facts, CLI outputs, and FAQs.
---

View File

@@ -1,67 +0,0 @@
{{ if .Site.Params.versions -}}
<a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-bs-toggle="dropdown" aria-expanded="false">
{{ .Site.Params.version_menu }}
</a>
<div class="dropdown-menu" id="version-dropdown-menu" aria-labelledby="navbarDropdown">
<div w3-include-html="{{ .Site.Params.releases_url }}" w3-include-html-default='<a class="dropdown-item" href="{{ .Site.Params.global_logo_url }}dev/">dev</a>'></div>
<script>
w3.includeHTML(function() {
const basePath = "{{ site.BaseURL | relURL }}";
function stripBase(path) {
if (path.startsWith(basePath)) return path.substring(basePath.length);
if (basePath === "/" && path.startsWith("/")) return path.substring(1);
return path.replace(/^\//, '');
}
document.getElementById('version-dropdown-menu').addEventListener('click', function(e) {
const link = e.target.closest('a.dropdown-item');
if (!link) return;
e.preventDefault();
let targetPath = link.pathname;
if (!targetPath.endsWith('/')) targetPath += '/';
let cleanCurrentPath = stripBase(window.location.pathname);
const allLinks = document.querySelectorAll('#version-dropdown-menu a.dropdown-item');
let currentVersionPrefix = "";
allLinks.forEach(a => {
let cleanVPath = stripBase(a.pathname);
if (!cleanVPath.endsWith('/')) cleanVPath += '/';
if (cleanVPath !== "" && cleanVPath !== "/" && cleanCurrentPath.startsWith(cleanVPath)) {
if (cleanVPath.length > currentVersionPrefix.length) {
currentVersionPrefix = cleanVPath;
}
}
});
let deepPath = cleanCurrentPath;
if (currentVersionPrefix !== "") {
deepPath = cleanCurrentPath.substring(currentVersionPrefix.length);
}
deepPath = deepPath.replace(/^\//, '');
const fullTargetPath = targetPath + deepPath;
// Perform a HEAD request to check if the deep route exists
fetch(fullTargetPath, { method: 'HEAD' })
.then(response => {
if (response.ok) {
// Page exists! Redirect to deep path
window.location.href = fullTargetPath;
} else {
// 404 or other error: Fallback to the version root
window.location.href = targetPath;
}
})
.catch(() => {
// If the fetch fails entirely, fallback to root
window.location.href = targetPath;
});
});
});
</script>
</div>
{{ end -}}

View File

@@ -1,187 +0,0 @@
{{/*
  Prev/Next page navigation partial.
  Computes $prev and $next relative to the current page ($curr) using
  sibling order by weight, then applies skip rules for index pages under
  /integrations/, and finally renders the two links.
  NOTE(review): ancestor climbing is hard-capped at 3 levels (range seq 3)
  and the integration skip-filter at $max_skip_attempts iterations, so very
  deep trees may fall back to a less precise link - confirm this bound is
  acceptable for the deepest section of the site.
*/}}
{{ $curr := . }}
{{ $next := "" }}
{{ $prev := "" }}
{{ $max_skip_attempts := 5 }}
{{/* 1. Calculate Previous Page */}}
{{/* Previous = the sibling just before us by weight; if we are the first
     sibling, fall back to the parent section (unless the parent is home). */}}
{{ if .Parent }}
{{ $siblings := .Parent.Pages.ByWeight }}
{{ $currIndex := -1 }}
{{ range $index, $page := $siblings }}
{{ if eq $page.RelPermalink $curr.RelPermalink }}
{{ $currIndex = $index }}
{{ end }}
{{ end }}
{{ if gt $currIndex 0 }}
{{ $prev = index $siblings (sub $currIndex 1) }}
{{ else }}
{{ if ne .Parent.Type "home" }}
{{ $prev = .Parent }}
{{ end }}
{{ end }}
{{ end }}
{{/* 2. Calculate Next Page */}}
{{/* Next = our first child (for section pages with children), otherwise the
     next sibling; if we are the last sibling, climb ancestors (max 3 levels)
     looking for an ancestor's next sibling. */}}
{{ if and .IsNode (gt (len .Pages) 0) }}
{{ $next = index .Pages.ByWeight 0 }}
{{ else }}
{{ if .Parent }}
{{ $siblings := .Parent.Pages.ByWeight }}
{{ $currIndex := -1 }}
{{ range $index, $page := $siblings }}
{{ if eq $page.RelPermalink $curr.RelPermalink }}
{{ $currIndex = $index }}
{{ end }}
{{ end }}
{{ if lt $currIndex (sub (len $siblings) 1) }}
{{ $next = index $siblings (add $currIndex 1) }}
{{ else }}
{{ $p := .Parent }}
{{ $foundNext := false }}
{{ range seq 3 }}
{{ if and (not $foundNext) $p }}
{{ if $p.Parent }}
{{ $pSiblings := $p.Parent.Pages.ByWeight }}
{{ $pIndex := -1 }}
{{ range $index, $page := $pSiblings }}
{{ if eq $page.RelPermalink $p.RelPermalink }}
{{ $pIndex = $index }}
{{ end }}
{{ end }}
{{ if and (ge $pIndex 0) (lt $pIndex (sub (len $pSiblings) 1)) }}
{{ $next = index $pSiblings (add $pIndex 1) }}
{{ $foundNext = true }}
{{ else }}
{{ $p = $p.Parent }}
{{ end }}
{{ else }}
{{ $p = false }}
{{ end }}
{{ end }}
{{ end }}
{{ end }}
{{ end }}
{{/* 3. Apply Integration Directory Filters */}}
{{/* A "locked" page is an index node under /integrations/ (a /tools/ or
     /samples/ folder, or a direct child of /integrations/). When $prev or
     $next lands on one, step past it; the loop is bounded by
     $max_skip_attempts so a chain of locked pages cannot loop forever. */}}
{{ range seq $max_skip_attempts }}
{{ if $prev }}
{{ $isLockedPrev := false }}
{{ if and $prev.IsNode (in $prev.RelPermalink "/integrations/") }}
{{ if or (strings.HasSuffix $prev.RelPermalink "/tools/") (strings.HasSuffix $prev.RelPermalink "/samples/") (and $prev.Parent (strings.HasSuffix $prev.Parent.RelPermalink "/integrations/")) }}
{{ $isLockedPrev = true }}
{{ end }}
{{ end }}
{{ if $isLockedPrev }}
{{/* If we are not inside the locked section, dive to its last child;
     otherwise step backwards through its siblings or up to its parent. */}}
{{ $steppingOut := strings.HasPrefix $curr.RelPermalink $prev.RelPermalink }}
{{ if and (not $steppingOut) (gt (len $prev.Pages) 0) }}
{{ $prev = index $prev.Pages.ByWeight (sub (len $prev.Pages) 1) }}
{{ else }}
{{ if $prev.Parent }}
{{ if eq $prev.Parent.RelPermalink "/integrations/" }}
{{ $prev = $prev.Parent }}
{{ else }}
{{ $sibs := $prev.Parent.Pages.ByWeight }}
{{ $idx := -1 }}
{{ range $i, $p := $sibs }}{{ if eq $p.RelPermalink $prev.RelPermalink }}{{ $idx = $i }}{{ end }}{{ end }}
{{ if gt $idx 0 }}
{{ $prev = index $sibs (sub $idx 1) }}
{{ else }}
{{ $prev = $prev.Parent }}
{{ end }}
{{ end }}
{{ else }}
{{ $prev = "" }}
{{ end }}
{{ end }}
{{ else }}
{{ break }}
{{ end }}
{{ end }}
{{ end }}
{{/* Same skip logic for $next, but walking forwards. */}}
{{ range seq $max_skip_attempts }}
{{ if $next }}
{{ $isLockedNext := false }}
{{ if and $next.IsNode (in $next.RelPermalink "/integrations/") }}
{{ if or (strings.HasSuffix $next.RelPermalink "/tools/") (strings.HasSuffix $next.RelPermalink "/samples/") (and $next.Parent (strings.HasSuffix $next.Parent.RelPermalink "/integrations/")) }}
{{ $isLockedNext = true }}
{{ end }}
{{ end }}
{{ if $isLockedNext }}
{{ if gt (len $next.Pages) 0 }}
{{ $next = index $next.Pages.ByWeight 0 }}
{{ else }}
{{ $sibs := $next.Parent.Pages.ByWeight }}
{{ $idx := -1 }}
{{ range $i, $p := $sibs }}{{ if eq $p.RelPermalink $next.RelPermalink }}{{ $idx = $i }}{{ end }}{{ end }}
{{ if lt $idx (sub (len $sibs) 1) }}
{{ $next = index $sibs (add $idx 1) }}
{{ else }}
{{ $p := $next.Parent }}
{{ $foundNextSibling := false }}
{{ range seq 3 }}
{{ if and (not $foundNextSibling) $p }}
{{ if $p.Parent }}
{{ $pSibs := $p.Parent.Pages.ByWeight }}
{{ $pIdx := -1 }}
{{ range $index, $page := $pSibs }}{{ if eq $page.RelPermalink $p.RelPermalink }}{{ $pIdx = $index }}{{ end }}{{ end }}
{{ if and (ge $pIdx 0) (lt $pIdx (sub (len $pSibs) 1)) }}
{{ $next = index $pSibs (add $pIdx 1) }}
{{ $foundNextSibling = true }}
{{ else }}
{{ $p = $p.Parent }}
{{ end }}
{{ else }}
{{ $p = false }}
{{ end }}
{{ end }}
{{ end }}
{{ if not $foundNextSibling }}{{ $next = "" }}{{ end }}
{{ end }}
{{ end }}
{{ else }}
{{ break }}
{{ end }}
{{ end }}
{{ end }}
{{/* 4. Render Navigation */}}
{{ if or $prev $next }}
<nav class="mt-5 pt-4 border-top d-flex justify-content-between" aria-label="Page navigation">
<div>
{{ with $prev }}
<a href="{{ .RelPermalink }}" class="text-decoration-none">
<small class="text-muted d-block mb-1">&laquo; Previous</small>
<span class="text-body">{{ .Title }}</span>
</a>
{{ end }}
</div>
<div class="text-end">
{{ with $next }}
<a href="{{ .RelPermalink }}" class="text-decoration-none">
<small class="text-muted d-block mb-1">Next &raquo;</small>
<span class="text-body">{{ .Title }}</span>
</a>
{{ end }}
</div>
</nav>
{{ end }}

View File

@@ -1,137 +0,0 @@
{{/*
  Site search partial backed by Pagefind.
  Sets the "docsy-search" scratch flag for other partials, renders the search
  container, loads the Pagefind UI assets, and attaches a modal-style overlay
  so results open above the page. The window.pagefindScriptLoaded guard keeps
  the script from double-initializing when this partial is included twice
  (e.g. desktop navbar + mobile menu).
*/}}
{{ .Scratch.Set "docsy-search" 0 }}
{{ if .Site.Params.pagefind }}
{{ .Scratch.Add "docsy-search" 1 }}
<div class="td-search custom-pagefind-wrapper">
<div class="pagefind-search-container"></div>
</div>
<link href="{{ relURL "pagefind/pagefind-ui.css" }}" rel="stylesheet">
<script src="{{ relURL "pagefind/pagefind-ui.js" }}"></script>
<script>
if (!window.pagefindScriptLoaded) {
window.pagefindScriptLoaded = true;
window.addEventListener('DOMContentLoaded', () => {
/* Initialize Pagefind UI */
document.querySelectorAll('.pagefind-search-container').forEach(el => {
new PagefindUI({
element: el,
showSubResults: true,
showImages: false,
resetStyles: false,
debounceTimeoutMs: 300,
bundlePath: "{{ relURL "pagefind/" }}",
/* Prefix each result title with a breadcrumb built from its URL path,
   skipping boilerplate segments (language, docs root, repo name). */
processResult: function (result) {
const ignoredSegments = ["en", "docs", "dev", "genai-toolbox"];
let path = result.url.replace(/^https?:\/\/[^\/]+/, "").split('#')[0].replace(/\/$/, "");
let parts = path.split("/").filter(p => p && !ignoredSegments.includes(p));
parts.pop();
if (parts.length > 0) {
let breadcrumb = parts.map(part => part.replace(/[-_]/g, ' ').replace(/\b\w/g, l => l.toUpperCase())).join(' ');
if (!result.rawTitle) result.rawTitle = result.meta.title;
result.meta.title = breadcrumb + ' ' + result.rawTitle;
}
return result;
}
});
});
/* Modal Behavior Logic */
/* Deferred 500ms so Pagefind has rendered its input before we wire events.
   The wrapper is moved to <body> while the modal is open; `placeholder`
   remembers its original DOM position so closeModal can put it back. */
setTimeout(() => {
let backdrop = document.getElementById('global-search-backdrop');
if (!backdrop) {
backdrop = document.createElement('div');
backdrop.id = 'global-search-backdrop';
document.body.appendChild(backdrop);
}
let isClosing = false;
let activeWrapper = null;
let placeholder = document.createElement('div');
placeholder.style.display = 'none';
function openModal(input) {
if (activeWrapper || isClosing) return;
const wrapper = input.closest('.custom-pagefind-wrapper');
if (!wrapper) return;
activeWrapper = wrapper;
wrapper.parentNode.insertBefore(placeholder, wrapper);
document.body.appendChild(wrapper);
backdrop.classList.add('active');
document.body.classList.add('global-search-active');
wrapper.classList.add('active-modal');
setTimeout(() => {
input.focus();
const len = input.value.length;
input.setSelectionRange(len, len);
}, 10);
}
function closeModal(keepFocus = false) {
if (!activeWrapper) return;
isClosing = true;
backdrop.classList.remove('active');
document.body.classList.remove('global-search-active');
activeWrapper.classList.remove('active-modal');
if (placeholder.parentNode) {
placeholder.parentNode.insertBefore(activeWrapper, placeholder);
}
const input = activeWrapper.querySelector('.pagefind-ui__search-input');
if (!keepFocus) {
const clearBtn = activeWrapper.querySelector('.pagefind-ui__search-clear');
if (clearBtn) clearBtn.click();
if (input) input.blur();
} else {
if (input) setTimeout(() => input.focus(), 10);
}
activeWrapper = null;
setTimeout(() => { isClosing = false; }, 50);
}
/* Event Listeners */
document.addEventListener('input', (e) => {
if (e.target.classList.contains('pagefind-ui__search-input')) {
e.target.value.trim() !== '' ? openModal(e.target) : closeModal(true);
}
});
backdrop.addEventListener('click', () => closeModal(false));
document.addEventListener('mousedown', (e) => {
if (!activeWrapper) return;
const clickedX = e.target.closest('.pagefind-ui__search-clear');
const clickedResult = e.target.closest('.pagefind-ui__result-link');
const isClickInside = activeWrapper.contains(e.target);
if (clickedResult) return;
if (clickedX || (!isClickInside && e.target !== backdrop)) closeModal(false);
});
document.addEventListener('keydown', (e) => {
if (e.key === 'Escape') closeModal(false);
});
}, 500);
});
}
</script>
{{ else if .Site.Params.offlineSearch }}
{{/* NOTE(review): this branch renders the container only - no Pagefind
     assets or init script are emitted here, so the box is inert unless
     something else initializes it. Confirm this is intentional. */}}
{{ .Scratch.Set "docsy-search" 1 }}
<div class="td-search custom-pagefind-wrapper flex-grow-1 order-last order-lg-1 w-100 mt-3 mt-lg-0">
<div class="pagefind-search-container w-100"></div>
</div>
{{ end }}

View File

@@ -1,47 +0,0 @@
{{/*
  Shortcode: compatible-sources table.
  Renders the database sources a tool page works with: the "native" source
  (the grandparent section's source.md, since this shortcode is used inside
  a tools/ folder) plus any extra section paths passed via the
  comma-separated "others" parameter. Unresolvable paths render an inline
  warning row instead of failing the build.
*/}}
{{/* Automatically identify the "Native" source (Grandparent source.md, since this is strictly used inside tools/) */}}
{{ $nativeSource := .Page.Parent.Parent.GetPage "source.md" }}
{{/* Fall back to the bare page name in case the .md lookup misses. */}}
{{ if not $nativeSource }}
{{ $nativeSource = .Page.Parent.Parent.GetPage "source" }}
{{ end }}
<div class="compatibility-section">
<p>This tool can be used with the following database sources:</p>
<table>
<thead>
<tr>
<th>Source Name</th>
</tr>
</thead>
<tbody>
{{/* Display the Native Source automatically */}}
{{ if $nativeSource }}
<tr>
<td><a href="{{ $nativeSource.RelPermalink }}">{{ $nativeSource.Title }}</a></td>
</tr>
{{ end }}
{{/* Process additional sources passed via the "others" parameter */}}
{{ $others := .Get "others" }}
{{ if $others }}
{{ range split $others "," }}
{{ $path := trim . " " }}
{{ $cleanPath := trim $path "/" }}
{{ $remotePage := site.GetPage (printf "%s/source.md" $cleanPath) }}
{{ if $remotePage }}
<tr>
<td><a href="{{ $remotePage.RelPermalink }}">{{ $remotePage.Title }}</a></td>
</tr>
{{ else }}
<tr>
<td style="color: red;">⚠️ Source not found at path: {{ $path }}</td>
</tr>
{{ end }}
{{ end }}
{{ end }}
</tbody>
</table>
</div>

View File

@@ -1,13 +1,2 @@
{{ $file := .Get 0 }}
{{ $lang := .Get 1 }}
{{ $content := (printf "%s%s" .Page.File.Dir $file) | readFile | replaceRE "^---[\\s\\S]+?---" "" | replaceRE "\r\n"
"\n" | strings.TrimRight "\n" }}
{{ if $lang }}
```{{ $lang }}
{{ $content | safeHTML }}
```
{{ else }}
{{ $content | safeHTML }}
{{ end }}
{{ (printf "%s%s" .Page.File.Dir $file) | readFile | replaceRE "^---[\\s\\S]+?---" "" | safeHTML }}

View File

@@ -1,98 +0,0 @@
{{/*
  Shortcode: database index list.
  Renders the current page's child sections as a flat list of linked rows
  (title + truncated description + hover arrow), sorted by title. If a child
  has a source.md, the row links to that page and uses its description
  instead. Scoped styles (with dark-mode overrides) follow the markup.
*/}}
<div class="db-index-list">
{{/* Loop through all sub-folders, sorted alphabetically */}}
{{ range .Page.Pages.ByTitle }}
{{ $displayTitle := .Title }}
{{ $targetLink := .RelPermalink }}
{{ $displayDesc := .Description }}
{{/* Prefer the child's source.md link/description when it exists. */}}
{{ with .GetPage "source.md" }}
{{ $targetLink = .RelPermalink }}
{{ $displayDesc = .Description }}
{{ end }}
<a href="{{ $targetLink }}" class="db-index-row">
<div class="db-index-content">
<div class="db-index-header">
<span class="db-index-title">{{ $displayTitle }}</span>
<svg class="db-index-arrow" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<line x1="5" y1="12" x2="19" y2="12"></line>
<polyline points="12 5 19 12 12 19"></polyline>
</svg>
</div>
<div class="db-index-desc">{{ $displayDesc | default "Explore this integration." | plainify | truncate 120 }}</div>
</div>
</a>
{{ end }}
</div>
<style>
.db-index-list {
display: flex;
flex-direction: column;
margin-top: 2rem;
margin-bottom: 3rem;
border-top: 1px solid rgba(0, 0, 0, 0.08); /* Top bounding line */
}
.db-index-row {
padding: 1.25rem 0;
border-bottom: 1px solid rgba(0, 0, 0, 0.08);
text-decoration: none !important;
display: block;
background: transparent;
transition: all 0.2s ease;
}
.db-index-row:hover .db-index-title {
color: #4484f4;
}
.db-index-row:hover .db-index-arrow {
opacity: 1;
transform: translateX(0);
color: #4484f4;
}
.db-index-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 0.3rem;
}
.db-index-title {
font-size: 1.1rem;
font-weight: 600;
color: #202124;
transition: color 0.2s ease;
}
.db-index-arrow {
opacity: 0;
transform: translateX(-10px);
color: #5f6368;
transition: all 0.25s cubic-bezier(0.25, 0.8, 0.25, 1);
}
.db-index-desc {
font-size: 0.95rem;
color: #5f6368;
line-height: 1.5;
padding-right: 2rem;
}
/* Dark Mode Adjustments */
html[data-bs-theme="dark"] .db-index-list,
body.dark .db-index-list { border-top-color: rgba(255, 255, 255, 0.1); }
html[data-bs-theme="dark"] .db-index-row,
body.dark .db-index-row { border-bottom-color: rgba(255, 255, 255, 0.1); }
html[data-bs-theme="dark"] .db-index-title,
body.dark .db-index-title { color: #e8eaed; }
html[data-bs-theme="dark"] .db-index-desc,
body.dark .db-index-desc { color: #9aa0a6; }
html[data-bs-theme="dark"] .db-index-row:hover .db-index-title,
html[data-bs-theme="dark"] .db-index-row:hover .db-index-arrow { color: #8ab4f8; }
</style>

View File

@@ -1,100 +0,0 @@
{{/*
  Shortcode: prebuilt-configs index.
  Lists every integration (child of the "integrations" section, sorted by
  title) that contains a "prebuilt-configs" folder, linking each row to that
  folder. Integrations without prebuilt configs are skipped entirely.
  NOTE(review): the scoped CSS below duplicates the .db-index-* rules used by
  the sibling db-index-list shortcode - if both render on one page the later
  <style> wins; presumably harmless since the rules are identical, verify.
*/}}
{{ $integrations := site.GetPage "section" "integrations" }}
<div class="db-index-list">
{{/*Loop through all database folders alphabetically */}}
{{ range $integrations.Pages.ByTitle }}
{{ $db := . }}
{{/* Only render a row if this database has a 'prebuilt-configs' folder */}}
{{ with $db.GetPage "prebuilt-configs" }}
{{ $displayTitle := $db.Title }}
{{ $targetLink := .RelPermalink }}
{{ $displayDesc := .Description | default (printf "Explore prebuilt configurations for %s." $db.Title) }}
<a href="{{ $targetLink }}" class="db-index-row">
<div class="db-index-content">
<div class="db-index-header">
<span class="db-index-title">{{ $displayTitle }}</span>
<svg class="db-index-arrow" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<line x1="5" y1="12" x2="19" y2="12"></line>
<polyline points="12 5 19 12 12 19"></polyline>
</svg>
</div>
<div class="db-index-desc">{{ $displayDesc | plainify | truncate 120 }}</div>
</div>
</a>
{{ end }}
{{ end }}
</div>
<style>
.db-index-list {
display: flex;
flex-direction: column;
margin-top: 2rem;
margin-bottom: 3rem;
border-top: 1px solid rgba(0, 0, 0, 0.08);
}
.db-index-row {
padding: 1.25rem 0;
border-bottom: 1px solid rgba(0, 0, 0, 0.08);
text-decoration: none !important;
display: block;
background: transparent;
transition: all 0.2s ease;
}
.db-index-row:hover .db-index-title {
color: #4484f4;
}
.db-index-row:hover .db-index-arrow {
opacity: 1;
transform: translateX(0);
color: #4484f4;
}
.db-index-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 0.3rem;
}
.db-index-title {
font-size: 1.1rem;
font-weight: 600;
color: #202124;
transition: color 0.2s ease;
}
.db-index-arrow {
opacity: 0;
transform: translateX(-10px);
color: #5f6368;
transition: all 0.25s cubic-bezier(0.25, 0.8, 0.25, 1);
}
.db-index-desc {
font-size: 0.95rem;
color: #5f6368;
line-height: 1.5;
padding-right: 2rem;
}
/* Dark Mode Adjustments */
html[data-bs-theme="dark"] .db-index-list,
body.dark .db-index-list { border-top-color: rgba(255, 255, 255, 0.1); }
html[data-bs-theme="dark"] .db-index-row,
body.dark .db-index-row { border-bottom-color: rgba(255, 255, 255, 0.1); }
html[data-bs-theme="dark"] .db-index-title,
body.dark .db-index-title { color: #e8eaed; }
html[data-bs-theme="dark"] .db-index-desc,
body.dark .db-index-desc { color: #9aa0a6; }
html[data-bs-theme="dark"] .db-index-row:hover .db-index-title,
html[data-bs-theme="dark"] .db-index-row:hover .db-index-arrow { color: #8ab4f8; }
</style>

View File

@@ -1,99 +0,0 @@
{{/*
  Shortcode: database tools table.
  Renders two groups of tool tables for the current database page:
  1. Native tools: regular pages under the local "tools" sub-folder,
     excluding pages flagged with is_wrapper in front matter.
  2. Inherited tools: for each comma-separated path in the "dirs" parameter,
     the regular pages of that remote tools folder (same is_wrapper filter),
     introduced with a compatibility note naming the remote database.
  $headerPrinted ensures the "<db> Tools" H3 is emitted at most once.
*/}}
{{/* Set Database Title */}}
{{ $dbTitle := .Page.Title }}
{{/* Gather & Pre-filter Native Tools (from the local "tools" sub-folder) */}}
{{ $validNativeTools := slice }}
{{ $localTools := .Page.GetPage "tools" }}
{{ if $localTools }}
{{ range $localTools.RegularPages }}
{{ if not .Params.is_wrapper }}
{{ $validNativeTools = $validNativeTools | append . }}
{{ end }}
{{ end }}
{{ end }}
{{/* Track if we've printed the main H3 header yet */}}
{{ $headerPrinted := false }}
{{/* Display Native Tools ONLY if valid ones exist */}}
{{ if gt (len $validNativeTools) 0 }}
<h3>{{ $dbTitle }} Tools</h3>
{{ $headerPrinted = true }}
<table>
<thead>
<tr>
<th>Tool Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
{{ range $validNativeTools }}
<tr>
<td><a href="{{ .RelPermalink }}"><strong>{{ .Title }}</strong></a></td>
<td>{{ .Description | default "No description provided." }}</td>
</tr>
{{ end }}
</tbody>
</table>
{{ end }}
{{/* Gather & Display Inherited Tools */}}
{{ $dirsParam := .Get "dirs" }}
{{ if $dirsParam }}
{{ range split $dirsParam "," }}
{{ $dirPath := trim . " " }}
{{ $targetDir := site.GetPage $dirPath }}
{{ if $targetDir }}
{{/* Since $targetDir is the "tools" folder, the DB name is simply its parent */}}
{{ $remoteDbTitle := $targetDir.Parent.Title }}
{{/* Pre-filter Inherited Tools */}}
{{ $validExternalTools := slice }}
{{ range $targetDir.RegularPages }}
{{ if not .Params.is_wrapper }}
{{ $validExternalTools = $validExternalTools | append . }}
{{ end }}
{{ end }}
{{ if gt (len $validExternalTools) 0 }}
{{/* Print the main H3 if the native tools block didn't already print it */}}
{{ if not $headerPrinted }}
<h3>{{ $dbTitle }} Tools</h3>
{{ $headerPrinted = true }}
{{ end }}
<p><em><strong>{{ $dbTitle }}</strong> maintains full compatibility with <strong>{{ $remoteDbTitle }}</strong>, allowing you to use the following tools with this connection:</em></p>
<table>
<thead>
<tr>
<th>Tool Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
{{ range $validExternalTools }}
<tr>
<td><a href="{{ .RelPermalink }}"><strong>{{ .Title }}</strong></a></td>
<td>{{ .Description | default "No description provided." }}</td>
</tr>
{{ end }}
</tbody>
</table>
{{ end }}
{{ else }}
<p style="color: red;"><em>Warning: Tool directory '{{ $dirPath }}' not found.</em></p>
{{ end }}
{{ end }}
{{ end }}
{{/* Fallback if absolutely NO tools exist anywhere */}}
{{/* NOTE(review): when "dirs" IS provided but every listed dir is empty or
     missing, no fallback message is shown - confirm that is intended. */}}
{{ if and (not $headerPrinted) (not $dirsParam) }}
<p><em>No tools found to display.</em></p>
{{ end }}

View File

@@ -1,19 +0,0 @@
{{/*
  Static callout warning that the default Toolbox flags (--allowed-hosts and
  --allowed-origins wildcards) are insecure for production, with example
  command lines for restricting hosts (DNS-rebinding protection) and origins
  (CORS). Pure markup - no template logic or parameters.
*/}}
<div class="td-sidebar-link td-sidebar-link__page alert alert-warning shadow-sm" role="alert">
<h4 class="alert-heading">⚠️ Production Security Warning</h4>
<p><strong>Secure your deployment:</strong> By default, Toolbox allows all hosts (<code>--allowed-hosts</code>) and all origins (<code>--allowed-origins</code>). While convenient for local development, this is <strong>insecure for production</strong>.</p>
<hr>
<ul class="mb-0">
<li class="mb-2">
<strong>Prevent DNS Rebinding:</strong> Use the <code>--allowed-hosts</code> flag to specify a list of hosts for validation.
<div class="mt-1"><small><em>Example:</em></small> <code>command: ["--config", "/config/tools.yaml", "--address", "0.0.0.0", "--allowed-hosts", "localhost:5000"]</code></div>
</li>
<li>
<strong>Implement CORS:</strong> Use the <code>--allowed-origins</code> flag to specify a list of origins permitted to access the server.
<div class="mt-1"><small><em>Example:</em></small> <code>command: ["--config", "/config/tools.yaml", "--address", "0.0.0.0", "--allowed-origins", "https://foo.bar"]</code></div>
</li>
</ul>
<p class="mt-3 mb-0 small opacity-75">Note: The server issues a warning in the logs if these are set to the wildcard <code>*</code>.</p>
</div>

View File

@@ -20,26 +20,24 @@
{{ errorf "File %q not found (referenced in %s)" $path .Page.File.Path }}
{{ end }}
{{ $content := readFile $path | replaceRE "\r\n" "\n" }}
{{ $content := readFile $path }}
{{ $start_tag := printf "[START %s]" $region }}
{{ $end_tag := printf "[END %s]" $region }}
{{ $lines := slice }}
{{ $snippet := "" }}
{{ $in_snippet := false }}
{{ range split $content "\n" }}
{{ if $in_snippet }}
{{ if in . $end_tag }}
{{ $in_snippet = false }}
{{ else }}
{{ $lines = $lines | append . }}
{{ $snippet = printf "%s%s\n" $snippet . }}
{{ end }}
{{ else if in . $start_tag }}
{{ $in_snippet = true }}
{{ end }}
{{ end }}
{{ $snippet := delimit $lines "\n" }}
{{ if eq (trim $snippet "") "" }}
{{ errorf "Region %q not found or empty in file %s (referenced in %s)" $region $file .Page.File.Path }}
{{ end }}
@@ -47,7 +45,5 @@
{{ if eq $lang "text" }}
{{ $snippet | markdownify }}
{{ else }}
```{{ $lang }}
{{ $snippet | safeHTML }}
```
{{ highlight (trim $snippet "\n") $lang "" }}
{{ end }}

View File

@@ -1,274 +0,0 @@
{{- /*
  Shortcode: searchable/filterable samples browser.
  Collects all pages with `is_sample: true` (sorted by title), derives the
  unique set of `sample_filters` tags, and renders a client-side search box,
  tag buttons (AND logic across selected tags), a card grid, and an empty
  state. All filtering happens in the inline script at the bottom; the
  scoped CSS supplies light/dark theming via CSS custom properties.
*/ -}}
{{- /* Fetch and sort samples */ -}}
{{- $samples := (where .Site.Pages "Params.is_sample" true).ByTitle -}}
{{- /* Extract unique filters */ -}}
{{- $allFilters := slice -}}
{{- range $samples -}}
{{- if .Params.sample_filters -}}
{{- range .Params.sample_filters -}}
{{- $allFilters = $allFilters | append . -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- $uniqueFilters := $allFilters | uniq | sort -}}
{{- /* Render UI */ -}}
<div class="samples-wrapper">
<!-- Search -->
<div class="samples-search-wrapper">
<svg class="search-icon" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<circle cx="11" cy="11" r="8"></circle>
<line x1="21" y1="21" x2="16.65" y2="16.65"></line>
</svg>
<input type="text" id="sampleSearchInput" placeholder="Search tutorials, integrations, or keywords...">
</div>
<!-- Filters -->
<div class="samples-sample-filters-wrapper" id="sampleTagFilters">
<button class="sample-tag-btn active" data-tag="all">All</button>
{{- range $uniqueFilters }}
<button class="sample-tag-btn" data-tag="{{ . | lower }}">{{ . }}</button>
{{- end }}
</div>
<!-- Grid -->
{{- /* Card data-* attributes carry the lowercase search/filter corpus the
       script below matches against. */ -}}
<div class="samples-grid" id="samplesGrid">
{{- range $samples }}
{{- $pageFilters := default slice .Params.sample_filters }}
<div class="sample-card visible"
data-title="{{ .Title | lower }}"
data-desc="{{ .Description | default "" | plainify | lower }}"
data-sample-filters="{{ delimit $pageFilters `,` | lower }}">
<div class="card-accent"></div>
<div class="card-content">
<h4><a href="{{ .RelPermalink }}" class="card-title-link">{{ .Title }}</a></h4>
<div class="card-desc">{{ .Description | default "Learn how to build this integration." | markdownify }}</div>
</div>
<div class="sample-card-sample-filters">
{{- range $pageFilters }}
<span class="badge">{{ . }}</span>
{{- end }}
</div>
</div>
{{- end }}
</div>
<!-- Empty State -->
<div id="samplesEmptyState" class="samples-empty-state" style="display: none;">
<svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" class="empty-icon">
<circle cx="11" cy="11" r="8"></circle>
<line x1="21" y1="21" x2="16.65" y2="16.65"></line>
</svg>
<h3>No samples found</h3>
<p>Try adjusting your search or filters.</p>
</div>
</div>
<style>
/* Scoped Variables */
.samples-wrapper {
--sh-primary: #4484f4;
--sh-primary-hover: #3367d6;
--sh-bg: #ffffff;
--sh-card-bg: #ffffff;
--sh-border: rgba(0, 0, 0, 0.08);
--sh-border-hover: rgba(68, 132, 244, 0.3);
--sh-text-main: #202124;
--sh-text-muted: #5f6368;
--sh-tag-bg: #f1f3f4;
--sh-tag-text: #3c4043;
--sh-shadow-sm: 0 2px 6px rgba(0,0,0,0.04);
--sh-shadow-lg: 0 12px 24px rgba(0,0,0,0.08);
margin-top: 2rem;
margin-bottom: 5rem;
font-family: inherit;
}
/* Dark Mode */
html[data-bs-theme="dark"] .samples-wrapper,
body.dark .samples-wrapper {
--sh-bg: #121212;
--sh-card-bg: #1e1e1e;
--sh-border: rgba(255, 255, 255, 0.1);
--sh-border-hover: rgba(138, 180, 248, 0.5);
--sh-text-main: #e8eaed;
--sh-text-muted: #9aa0a6;
--sh-tag-bg: rgba(255, 255, 255, 0.05);
--sh-tag-text: #e8eaed;
--sh-primary: #8ab4f8;
--sh-shadow-sm: 0 4px 12px rgba(0,0,0,0.2);
--sh-shadow-lg: 0 12px 30px rgba(0,0,0,0.4);
}
/* Search */
.samples-search-wrapper { position: relative; margin: 0 auto 2rem auto; max-width: 600px; }
.search-icon {
position: absolute; left: 1.25rem; top: 50%; transform: translateY(-50%);
width: 20px; height: 20px; color: var(--sh-text-muted); transition: color 0.3s ease;
}
.samples-search-wrapper input {
width: 100%; padding: 1rem 1rem 1rem 3.25rem; border: 1px solid var(--sh-border);
border-radius: 16px; font-size: 1.05rem; background: var(--sh-card-bg);
color: var(--sh-text-main); box-shadow: var(--sh-shadow-sm);
transition: all 0.3s cubic-bezier(0.25, 0.8, 0.25, 1); outline: none;
}
.samples-search-wrapper input:focus { border-color: var(--sh-primary); box-shadow: 0 0 0 4px rgba(68, 132, 244, 0.15); }
.samples-search-wrapper input:focus + .search-icon,
.samples-search-wrapper input:not(:placeholder-shown) ~ .search-icon {
color: var(--sh-primary);
}
/* Filters */
.samples-sample-filters-wrapper { display: flex; flex-wrap: wrap; gap: 0.6rem; margin-bottom: 2.5rem; }
.sample-tag-btn {
padding: 0.5rem 1.2rem; border: 1px solid var(--sh-border); background: var(--sh-card-bg);
color: var(--sh-text-muted); border-radius: 30px; cursor: pointer; font-size: 0.9rem;
font-weight: 500; transition: all 0.25s ease; box-shadow: var(--sh-shadow-sm);
}
.sample-tag-btn:hover { border-color: var(--sh-primary); color: var(--sh-primary); transform: translateY(-1px); }
.sample-tag-btn.active {
background: var(--sh-primary); border-color: var(--sh-primary); color: #ffffff !important;
box-shadow: 0 4px 12px rgba(68, 132, 244, 0.3);
}
/* Grid */
.samples-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(340px, 1fr)); gap: 1.75rem; }
/* Hidden cards stay in the DOM: position:absolute + opacity 0 keeps them out
   of layout; .visible flips them back to position:relative. */
.sample-card {
position: absolute;
display: flex; flex-direction: column; background: var(--sh-card-bg);
border: 1px solid var(--sh-border); border-radius: 16px; padding: 1.75rem;
overflow: hidden; transition: all 0.3s cubic-bezier(0.25, 0.8, 0.25, 1);
box-shadow: var(--sh-shadow-sm); opacity: 0; transform: scale(0.95);
pointer-events: none; visibility: hidden;
}
/* Visibility Toggle */
.sample-card.visible { opacity: 1; transform: scale(1); pointer-events: auto; position: relative; visibility: visible; }
.sample-card:hover { transform: translateY(-6px); box-shadow: var(--sh-shadow-lg); border-color: var(--sh-border-hover); }
/* Card Accent */
.card-accent {
position: absolute; top: 0; left: 0; right: 0; height: 4px;
background: linear-gradient(90deg, #4484f4, #80a7e9); opacity: 0.7; transition: opacity 0.3s ease;
}
.sample-card:hover .card-accent { opacity: 1; }
/* Card Content */
.card-content { flex-grow: 1; display: flex; flex-direction: column; }
.sample-card h4 { margin-top: 0.5rem; margin-bottom: 0.75rem; font-size: 1.25rem; font-weight: 600; line-height: 1.4; color: var(--sh-text-main); }
/* Card Click Overlay */
.card-title-link { text-decoration: none; color: inherit; }
.card-title-link::after { content: ''; position: absolute; inset: 0; z-index: 1; }
.card-desc { font-size: 0.95rem; color: var(--sh-text-muted); margin-bottom: 1.5rem; line-height: 1.6; position: relative; z-index: 2; }
.card-desc a { color: var(--sh-primary); text-decoration: none; font-weight: 500; }
.card-desc a:hover { text-decoration: underline; }
.card-desc p { margin-bottom: 0; }
/* Card Tags */
.sample-card-sample-filters { display: flex; flex-wrap: wrap; gap: 0.5rem; position: relative; z-index: 2; margin-top: auto; }
.sample-card-sample-filters .badge {
background: var(--sh-tag-bg); color: var(--sh-tag-text); padding: 0.35rem 0.75rem;
border-radius: 6px; font-size: 0.75rem; font-weight: 600; letter-spacing: 0.3px;
border: 1px solid var(--sh-border);
}
/* Empty State */
.samples-empty-state { text-align: center; padding: 4rem 1rem; color: var(--sh-text-muted); animation: fadeIn 0.4s ease forwards; }
.empty-icon { color: var(--sh-border); margin-bottom: 1rem; width: 64px; height: 64px; }
.samples-empty-state h3 { color: var(--sh-text-main); margin-bottom: 0.5rem; }
@keyframes fadeIn { from { opacity: 0; transform: translateY(10px); } to { opacity: 1; transform: translateY(0); } }
</style>
<script>
document.addEventListener("DOMContentLoaded", () => {
const searchInput = document.getElementById('sampleSearchInput');
const tagButtons = document.querySelectorAll('.sample-tag-btn');
const cards = document.querySelectorAll('.sample-card');
const emptyState = document.getElementById('samplesEmptyState');
const btnAll = document.querySelector('.sample-tag-btn[data-tag="all"]');
const activeTags = new Set();
/* Re-evaluate every card against the current search query and tag set;
   toggles .visible and shows the empty state when nothing matches. */
const filterSamples = () => {
const query = searchInput ? searchInput.value.toLowerCase().trim() : "";
let visibleCount = 0;
cards.forEach(card => {
const title = card.getAttribute('data-title') || "";
const desc = card.getAttribute('data-desc') || "";
const sampleFilters = card.getAttribute('data-sample-filters') || "";
const cardTagsArray = sampleFilters ? sampleFilters.split(',') : [];
const matchesSearch = title.includes(query) || desc.includes(query) || sampleFilters.includes(query);
// Match all selected tags (AND logic)
const matchesTag = activeTags.size === 0 || Array.from(activeTags).every(tag => cardTagsArray.includes(tag));
if (matchesSearch && matchesTag) {
card.classList.add('visible');
visibleCount++;
} else {
card.classList.remove('visible');
}
});
emptyState.style.display = visibleCount === 0 ? 'block' : 'none';
};
// Bind search
if (searchInput) {
searchInput.addEventListener('input', filterSamples);
}
// Bind tag toggles
tagButtons.forEach(btn => {
btn.addEventListener('click', (e) => {
const selectedTag = e.currentTarget.getAttribute('data-tag');
if (selectedTag === 'all') {
// Reset filters
activeTags.clear();
tagButtons.forEach(b => b.classList.remove('active'));
e.currentTarget.classList.add('active');
} else {
// Toggle individual tag
if (activeTags.has(selectedTag)) {
activeTags.delete(selectedTag);
e.currentTarget.classList.remove('active');
} else {
activeTags.add(selectedTag);
e.currentTarget.classList.add('active');
}
// Update 'All' button state
if (btnAll) {
if (activeTags.size === 0) {
btnAll.classList.add('active');
} else {
btnAll.classList.remove('active');
}
}
}
filterSamples();
});
});
});
</script>

View File

@@ -1,5 +1,5 @@
{
"name": ".hugo",
"name": "docs2",
"lockfileVersion": 3,
"requires": true,
"packages": {

View File

@@ -1,155 +0,0 @@
/**
* Custom Layout Interactivity
* Handles dynamic offsets, DOM repositioning, and UI enhancements.
*/
document.addEventListener('DOMContentLoaded', function() {
// ==========================================================================
// DYNAMIC STYLES INJECTION
// ==========================================================================
var styleTag = document.createElement('style');
styleTag.innerHTML = `
.td-navbar .dropdown-menu {
z-index: 9999 !important;
}
.theme-banner-wrapper {
position: sticky;
z-index: 20;
padding-top: 15px;
padding-bottom: 5px;
margin-bottom: 2rem;
background-color: var(--bs-body-bg, #ffffff);
}
.theme-migration-banner {
background-color: #ebf3fc;
border: 1px solid #80a7e9;
color: #1c3a6b;
border-radius: 4px;
padding: 15px;
text-align: center;
width: 100%;
box-shadow: 0 4px 6px rgba(0,0,0,0.05);
}
.theme-migration-banner a {
color: #4484f4;
text-decoration: underline;
font-weight: bold;
}
/* DARK MODE STYLING */
html[data-bs-theme="dark"] .theme-banner-wrapper,
body.dark .theme-banner-wrapper,
html.dark-mode .theme-banner-wrapper {
background-color: var(--bs-body-bg, #20252b);
}
html[data-bs-theme="dark"] .theme-migration-banner,
body.dark .theme-migration-banner,
html.dark-mode .theme-migration-banner {
background-color: #1a273b;
color: #e6efff;
box-shadow: 0 4px 6px rgba(0,0,0,0.3);
}
html[data-bs-theme="dark"] .theme-migration-banner a,
body.dark .theme-migration-banner a,
html.dark-mode .theme-migration-banner a {
color: #80a7e9;
}
@media (prefers-color-scheme: dark) {
html:not([data-bs-theme="light"]):not(.light) .theme-banner-wrapper {
background-color: var(--bs-body-bg, #20252b);
}
html:not([data-bs-theme="light"]):not(.light) .theme-migration-banner {
background-color: #1a273b;
color: #e6efff;
box-shadow: 0 4px 6px rgba(0,0,0,0.3);
}
html:not([data-bs-theme="light"]):not(.light) .theme-migration-banner a {
color: #80a7e9;
}
}
/* Disable Sticky Banner on Mobile */
@media (max-width: 991.98px) {
.theme-banner-wrapper {
position: relative !important;
top: auto !important;
z-index: 1;
}
}
`;
document.head.appendChild(styleTag);
// ==========================================================================
// MIGRATION BANNER & HEADER OFFSET CALCULATOR
// ==========================================================================
// Measures the combined height of the primary navbar and the secondary nav,
// publishes it as the --header-offset CSS custom property, and uses the same
// value as the sticky `top` offset of the migration banner wrapper.
function updateHeaderOffset() {
var topNav = document.querySelector('.td-navbar');
var secondary = document.getElementById('secondary-nav');
var bannerWrap = document.getElementById('migration-banner-wrapper');
var offset = (topNav ? topNav.offsetHeight : 0) + (secondary ? secondary.offsetHeight : 0);
document.documentElement.style.setProperty('--header-offset', offset + 'px');
if (bannerWrap) {
bannerWrap.style.top = offset + 'px';
}
}
// Build the sticky wrapper that will host the migration banner.
var bannerWrapper = document.createElement('div');
bannerWrapper.id = 'migration-banner-wrapper';
bannerWrapper.className = 'theme-banner-wrapper';
// Build the banner itself and nest it inside the wrapper.
var migrationBanner = document.createElement('div');
migrationBanner.className = 'theme-migration-banner';
migrationBanner.innerHTML = '⚠️ <strong>Archived Docs:</strong> Visit <a href="https://mcp-toolbox.dev/">mcp-toolbox.dev</a> for the latest version.';
bannerWrapper.appendChild(migrationBanner);
// Prefer the docs content column; fall back to <main> for other layouts.
var injectionTarget = document.querySelector('.td-content') || document.querySelector('main');
if (!injectionTarget) {
console.warn("Could not find the main content column to inject the banner.");
} else {
injectionTarget.prepend(bannerWrapper);
}
// Seed the sticky offset once, then keep it fresh on viewport resizes.
updateHeaderOffset();
window.addEventListener('resize', updateHeaderOffset);
// Headers can change height without a window resize (e.g. responsive
// collapse), so also watch them directly where ResizeObserver exists.
if (window.ResizeObserver) {
const observer = new ResizeObserver(updateHeaderOffset);
const primaryNav = document.querySelector('.td-navbar');
const secondaryNav = document.getElementById('secondary-nav');
if (primaryNav) observer.observe(primaryNav);
if (secondaryNav) observer.observe(secondaryNav);
}
// ==========================================================================
// BREADCRUMBS REPOSITIONING
// ==========================================================================
// Move the breadcrumb trail directly above the page <h1> and give it
// vertical breathing room. No-op when either node is missing.
var crumbs = document.querySelector('.td-breadcrumbs') || document.querySelector('nav[aria-label="breadcrumb"]');
var pageHeading = document.querySelector('.td-content h1');
if (crumbs && pageHeading) {
pageHeading.parentNode.insertBefore(crumbs, pageHeading);
crumbs.style.marginTop = "1rem";
crumbs.style.marginBottom = "2rem";
}
});

View File

@@ -1,98 +0,0 @@
// Injects a sticky "archived docs" migration banner at the top of the docs
// content column, styled for both light and dark themes.
document.addEventListener('DOMContentLoaded', function() {
// Setup CSS for the wrapper and the banner
var styleTag = document.createElement('style');
styleTag.innerHTML = `
.td-navbar .dropdown-menu {
z-index: 9999 !important;
}
.theme-banner-wrapper {
position: sticky;
z-index: 20;
padding-top: 15px; /* This is your gap! */
padding-bottom: 5px; /* Breathing room below the banner */
/* Uses Bootstrap's native body background variable, with white as fallback */
background-color: var(--bs-body-bg, #ffffff);
}
.theme-migration-banner {
background-color: #ebf3fc;
border: 1px solid #80a7e9;
color: #1c3a6b;
border-radius: 4px;
padding: 15px;
text-align: center;
width: 100%;
box-shadow: 0 4px 6px rgba(0,0,0,0.05);
}
.theme-migration-banner a {
color: #4484f4;
text-decoration: underline;
font-weight: bold;
}
/* DARK MODE STYLING */
html[data-bs-theme="dark"] .theme-banner-wrapper,
body.dark .theme-banner-wrapper,
html.dark-mode .theme-banner-wrapper {
/* Uses Docsy's dark mode background fallback if var fails */
background-color: var(--bs-body-bg, #20252b);
}
html[data-bs-theme="dark"] .theme-migration-banner,
body.dark .theme-migration-banner,
html.dark-mode .theme-migration-banner {
background-color: #1a273b;
color: #e6efff;
box-shadow: 0 4px 6px rgba(0,0,0,0.3);
}
html[data-bs-theme="dark"] .theme-migration-banner a,
body.dark .theme-migration-banner a,
html.dark-mode .theme-migration-banner a {
color: #80a7e9;
}
/* Fallback for OS-level dark mode */
@media (prefers-color-scheme: dark) {
html:not([data-bs-theme="light"]):not(.light) .theme-banner-wrapper {
background-color: var(--bs-body-bg, #20252b);
}
html:not([data-bs-theme="light"]):not(.light) .theme-migration-banner {
background-color: #1a273b;
color: #e6efff;
box-shadow: 0 4px 6px rgba(0,0,0,0.3);
}
html:not([data-bs-theme="light"]):not(.light) .theme-migration-banner a {
color: #80a7e9;
}
}
`;
document.head.appendChild(styleTag);
// Create the Wrapper
var wrapper = document.createElement('div');
wrapper.id = 'migration-banner-wrapper';
wrapper.className = 'theme-banner-wrapper';
// Create the Banner
var banner = document.createElement('div');
banner.className = 'theme-migration-banner';
banner.innerHTML = '⚠️ <strong>Archived Docs:</strong> Visit <a href="https://mcp-toolbox.dev/">mcp-toolbox.dev</a> for the latest version.';
wrapper.appendChild(banner);
// Inject the wrapper into the center information column
var contentArea = document.querySelector('.td-content') || document.querySelector('main');
if (contentArea) {
contentArea.prepend(wrapper);
} else {
console.warn("Could not find the main content column to inject the banner.");
}
// Keep the sticky offset in sync with the navbar height. Measuring only
// once at DOMContentLoaded left a stale offset after viewport resizes
// (e.g. device rotation or the navbar wrapping to two rows).
function updateWrapperOffset() {
var navbar = document.querySelector('.td-navbar');
var navbarHeight = navbar ? navbar.offsetHeight : 64;
wrapper.style.top = navbarHeight + 'px';
}
updateWrapperOffset();
window.addEventListener('resize', updateWrapperOffset);
});

View File

@@ -1,405 +0,0 @@
/* W3.JS 1.04 April 2019 by w3schools.com */
"use strict";
var w3 = {};
// Hide every element matched by the selector (or the single node passed).
w3.hide = function (sel) {
w3.hideElements(w3.getElements(sel));
};
// Hide each element in a collection.
w3.hideElements = function (elements) {
for (var i = 0, n = elements.length; i < n; i++) {
w3.hideElement(elements[i]);
}
};
// Hide a single element via inline display:none.
w3.hideElement = function (element) {
w3.styleElement(element, "display", "none");
};
// Show every matched element; when `a` is truthy the elements are hidden
// first (forcing a fresh re-show).
w3.show = function (sel, a) {
var elements = w3.getElements(sel);
if (a) {
w3.hideElements(elements);
}
w3.showElements(elements);
};
// Show each element in a collection.
w3.showElements = function (elements) {
for (var i = 0, n = elements.length; i < n; i++) {
w3.showElement(elements[i]);
}
};
// Show a single element as display:block.
w3.showElement = function (element) {
w3.styleElement(element, "display", "block");
};
// Apply one inline CSS property/value to every matched element.
w3.addStyle = function (sel, prop, val) {
w3.styleElements(w3.getElements(sel), prop, val);
};
// Apply a property/value to each element in a collection.
w3.styleElements = function (elements, prop, val) {
for (var i = 0, n = elements.length; i < n; i++) {
w3.styleElement(elements[i], prop, val);
}
};
// Apply a property/value to one element's inline style.
w3.styleElement = function (element, prop, val) {
element.style.setProperty(prop, val);
};
// Toggle each matched element between display:none and display:block.
// Only an inline display of exactly "none" counts as hidden.
w3.toggleShow = function (sel) {
var elements = w3.getElements(sel);
for (var i = 0, n = elements.length; i < n; i++) {
var el = elements[i];
if (el.style.display == "none") {
w3.styleElement(el, "display", "block");
} else {
w3.styleElement(el, "display", "none");
}
}
};
// Add one or more space-separated class names to every matched element.
w3.addClass = function (sel, name) {
w3.addClassElements(w3.getElements(sel), name);
};
// Add the class names to each element in a collection.
w3.addClassElements = function (elements, name) {
for (var i = 0, n = elements.length; i < n; i++) {
w3.addClassElement(elements[i], name);
}
};
// Append each class in `name` the element does not already carry. The
// existing-class snapshot is taken once, before any appends.
w3.addClassElement = function (element, name) {
var existing = element.className.split(" ");
var wanted = name.split(" ");
for (var i = 0; i < wanted.length; i++) {
if (existing.indexOf(wanted[i]) == -1) {
element.className += " " + wanted[i];
}
}
};
// Remove one or more space-separated class names from every matched element.
w3.removeClass = function (sel, name) {
w3.removeClassElements(w3.getElements(sel), name);
};
// Remove the class names from each element in a collection.
// (Dropped the previously declared but never used locals arr1, arr2, j.)
w3.removeClassElements = function (elements, name) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.removeClassElement(elements[i], name);
}
};
// Strip every occurrence of each class in `name`, then rebuild className.
w3.removeClassElement = function (element, name) {
var i, arr1, arr2;
arr1 = element.className.split(" ");
arr2 = name.split(" ");
for (i = 0; i < arr2.length; i++) {
// loop so repeated occurrences of the same class are all removed
while (arr1.indexOf(arr2[i]) > -1) {
arr1.splice(arr1.indexOf(arr2[i]), 1);
}
}
element.className = arr1.join(" ");
};
// Toggle class c1 (or swap between c1 and c2) on every matched element.
w3.toggleClass = function (sel, c1, c2) {
w3.toggleClassElements(w3.getElements(sel), c1, c2);
};
// Toggle the classes on each element in a collection.
w3.toggleClassElements = function (elements, c1, c2) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.toggleClassElement(elements[i], c1, c2);
}
};
// Toggle classes on a single element.
// NOTE(review): "".split(" ") yields [""], so t2Arr.length is never 0 and
// the first branch below is dead code. When c2 is omitted the else branch
// runs with t2 == "", which can leave stray spaces in className. Preserved
// as-is since callers may depend on the observable behavior.
w3.toggleClassElement = function (element, c1, c2) {
var t1, t2, t1Arr, t2Arr, j, arr, allPresent;
t1 = (c1 || "");
t2 = (c2 || "");
t1Arr = t1.split(" ");
t2Arr = t2.split(" ");
arr = element.className.split(" ");
if (t2Arr.length == 0) {
allPresent = true;
for (j = 0; j < t1Arr.length; j++) {
if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
}
if (allPresent) {
w3.removeClassElement(element, t1);
} else {
w3.addClassElement(element, t1);
}
} else {
// Swap: if all of t1 is present, replace t1 with t2; otherwise ensure t1.
allPresent = true;
for (j = 0; j < t1Arr.length; j++) {
if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
}
if (allPresent) {
w3.removeClassElement(element, t1);
w3.addClassElement(element, t2);
} else {
w3.removeClassElement(element, t2);
w3.addClassElement(element, t1);
}
}
};
// Normalize a target: a DOM node (any object) is wrapped in a one-element
// array; a selector string is resolved via querySelectorAll.
w3.getElements = function (id) {
if (typeof id == "object") {
return [id];
}
return document.querySelectorAll(id);
};
// Inside each container matched by `id`, show only those `sel` elements
// whose innerText — or any descendant's innerText — contains `filter`,
// case-insensitively. Non-matching elements get display:none.
w3.filterHTML = function (id, sel, filter) {
var needle = filter.toUpperCase();
var containers = w3.getElements(id);
for (var i = 0; i < containers.length; i++) {
var rows = containers[i].querySelectorAll(sel);
for (var r = 0; r < rows.length; r++) {
var hit = rows[r].innerText.toUpperCase().indexOf(needle) > -1;
var descendants = rows[r].getElementsByTagName("*");
for (var k = 0; k < descendants.length; k++) {
if (descendants[k].innerText.toUpperCase().indexOf(needle) > -1) {
hit = true;
}
}
rows[r].style.display = hit ? "" : "none";
}
}
};
// Sort the `sel` children of each container matched by `id` by their text
// (or by the text of a `sortvalue` sub-selector). A bubble-style pass runs
// ascending first (j == 0); if that pass performs no swaps (cc == 0) the
// list was already ascending, so a descending pass (j == 1) runs instead —
// repeated calls therefore toggle the sort direction.
w3.sortHTML = function(id, sel, sortvalue) {
var a, b, i, ii, y, bytt, v1, v2, cc, j;
a = w3.getElements(id);
for (i = 0; i < a.length; i++) {
for (j = 0; j < 2; j++) {
cc = 0;
y = 1;
// Re-scan from the start after every swap (y flags "swapped").
while (y == 1) {
y = 0;
b = a[i].querySelectorAll(sel);
for (ii = 0; ii < (b.length - 1); ii++) {
bytt = 0;
if (sortvalue) {
v1 = b[ii].querySelector(sortvalue).innerText;
v2 = b[ii + 1].querySelector(sortvalue).innerText;
} else {
v1 = b[ii].innerText;
v2 = b[ii + 1].innerText;
}
v1 = v1.toLowerCase();
v2 = v2.toLowerCase();
// Out-of-order pair found: stop scanning and swap it below.
if ((j == 0 && (v1 > v2)) || (j == 1 && (v1 < v2))) {
bytt = 1;
break;
}
}
if (bytt == 1) {
b[ii].parentNode.insertBefore(b[ii + 1], b[ii]);
y = 1;
cc++;
}
}
// Any swap in this direction means we're done; skip the other pass.
if (cc > 0) {break;}
}
}
};
// Turn the matched elements into a slideshow. `ms` is the auto-advance
// interval in milliseconds (<= 0 disables auto-advance; non-numeric falls
// back to 1000). `func` is invoked after every display change. Returns the
// controller object with start/next/previous/display methods.
w3.slideshow = function (sel, ms, func) {
var i, ss, x = w3.getElements(sel), l = x.length;
ss = {};
ss.current = 1;
ss.x = x;
ss.ondisplaychange = func;
if (!isNaN(ms) || ms == 0) {
ss.milliseconds = ms;
} else {
ss.milliseconds = 1000;
}
// Show the current slide, fire the callback, and (re)arm the timer.
// clearTimeout first so manual next/previous calls don't stack timers.
ss.start = function() {
ss.display(ss.current)
if (ss.ondisplaychange) {ss.ondisplaychange();}
if (ss.milliseconds > 0) {
window.clearTimeout(ss.timeout);
ss.timeout = window.setTimeout(ss.next, ss.milliseconds);
}
};
// Advance one slide, wrapping from the last back to the first.
ss.next = function() {
ss.current += 1;
if (ss.current > ss.x.length) {ss.current = 1;}
ss.start();
};
// Step back one slide, wrapping from the first to the last.
ss.previous = function() {
ss.current -= 1;
if (ss.current < 1) {ss.current = ss.x.length;}
ss.start();
};
// Show slide n (1-based) and hide all the others.
ss.display = function (n) {
w3.styleElements(ss.x, "display", "none");
w3.styleElement(ss.x[n - 1], "display", "block");
}
ss.start();
return ss;
};
// Resolve w3-include-html attributes: fetch each referenced file and inline
// its contents. Processes ONE include per invocation — after the XHR
// completes it removes the attribute and recursively calls itself, which
// also picks up includes nested inside the fetched markup. `cb` fires only
// on the final call, when no include attributes remain.
w3.includeHTML = function(cb) {
var z, i, elmnt, file, xhttp;
z = document.getElementsByTagName("*");
for (i = 0; i < z.length; i++) {
elmnt = z[i];
file = elmnt.getAttribute("w3-include-html");
if (file) {
xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4) {
if (this.status == 200) {elmnt.innerHTML = this.responseText;}
if (this.status == 404) {
// Optional per-element fallback markup on missing files.
if (elmnt.getAttribute("w3-include-html-default")) {
elmnt.innerHTML = elmnt.getAttribute("w3-include-html-default");
}
else { elmnt.innerHTML = "Page not found."; }
}
elmnt.removeAttribute("w3-include-html");
w3.includeHTML(cb);
}
}
xhttp.open("GET", file, true);
xhttp.send();
// Only one request in flight: stop scanning; recursion handles the rest.
return;
}
}
if (cb) cb();
};
// Fetch a file and pass its raw response text to func on success.
w3.getHttpData = function (file, func) {
w3.http(file, function () {
if (this.readyState != 4 || this.status != 200) { return; }
func(this.responseText);
});
};
// Fetch a file, JSON-parse it, and pass the resulting object to func.
w3.getHttpObject = function (file, func) {
w3.http(file, function () {
if (this.readyState != 4 || this.status != 200) { return; }
func(JSON.parse(this.responseText));
});
};
// Fetch a JSON file and render it into the element with the given id.
w3.displayHttp = function (id, file) {
w3.http(file, function () {
if (this.readyState != 4 || this.status != 200) { return; }
w3.displayObject(id, JSON.parse(this.responseText));
});
};
// Minimal async XHR wrapper (default method GET) with an optional
// onreadystatechange callback and optional request body.
w3.http = function (target, readyfunc, xml, method) {
var req;
if (!method) {method = "GET"; }
if (window.XMLHttpRequest) {
req = new XMLHttpRequest();
} else if (window.ActiveXObject) {
// Legacy IE fallback.
req = new ActiveXObject("Microsoft.XMLHTTP");
}
if (!req) { return; }
if (readyfunc) { req.onreadystatechange = readyfunc; }
req.open(method, target, true);
req.send(xml);
};
// Collect x itself plus every descendant of x that carries attribute `att`.
// NOTE(review): the i == -1 iteration stores x into slot -1 of the live
// HTMLCollection (an expando assignment) so the root is tested by the same
// loop body — quirky but deliberate. The attribute name is uppercased
// before getAttribute; getAttribute is case-insensitive for HTML elements.
w3.getElementsByAttribute = function (x, att) {
var arr = [], arrCount = -1, i, l, y = x.getElementsByTagName("*"), z = att.toUpperCase();
l = y.length;
for (i = -1; i < l; i += 1) {
if (i == -1) {y[i] = x;}
if (y[i].getAttribute(z) !== null) {arrCount += 1; arr[arrCount] = y[i];}
}
return arr;
};
// Cache of pristine templates keyed by element id; displayObject consults it
// so repeated renders start from the original markup. (Note: the trailing
// comma chains this assignment to the next one via the comma operator.)
w3.dataObject = {},
// Render `data` into the element with the given id: expand w3-repeat
// attributes over arrays in data, then substitute {{field}} placeholders.
w3.displayObject = function (id, data) {
var htmlObj, htmlTemplate, html, arr = [], a, l, rowClone, x, j, i, ii, cc, repeat, repeatObj, repeatX = "";
htmlObj = document.getElementById(id);
htmlTemplate = init_template(id, htmlObj);
html = htmlTemplate.cloneNode(true);
// Process repeats bottom-up so DOM mutations don't invalidate the list.
arr = w3.getElementsByAttribute(html, "w3-repeat");
l = arr.length;
for (j = (l - 1); j >= 0; j -= 1) {
// Either "items" or the "alias in items" form.
cc = arr[j].getAttribute("w3-repeat").split(" ");
if (cc.length == 1) {
repeat = cc[0];
} else {
repeatX = cc[0];
repeat = cc[2];
}
arr[j].removeAttribute("w3-repeat");
repeatObj = data[repeat];
// NOTE(review): `length != "undefined"` compares a number with the string
// "undefined" and is true for any value — likely intended `typeof`; as
// written the "must be an array" check only rejects non-objects.
if (repeatObj && typeof repeatObj == "object" && repeatObj.length != "undefined") {
i = 0;
for (x in repeatObj) {
i += 1;
rowClone = arr[j];
rowClone = w3_replace_curly(rowClone, "element", repeatX, repeatObj[x]);
a = rowClone.attributes;
for (ii = 0; ii < a.length; ii += 1) {
a[ii].value = w3_replace_curly(a[ii], "attribute", repeatX, repeatObj[x]).value;
}
// Last item replaces the template row; earlier ones insert before it.
(i === repeatObj.length) ? arr[j].parentNode.replaceChild(rowClone, arr[j]) : arr[j].parentNode.insertBefore(rowClone, arr[j]);
}
} else {
console.log("w3-repeat must be an array. " + repeat + " is not an array.");
continue;
}
}
// Substitute remaining top-level placeholders, then swap into the page.
html = w3_replace_curly(html, "element");
htmlObj.parentNode.replaceChild(html, htmlObj);
// Return the cached pristine template for id, caching on first use.
// (A clone is made before the cache check, so cache hits waste one clone.)
function init_template(id, obj) {
var template;
template = obj.cloneNode(true);
if (w3.dataObject.hasOwnProperty(id)) {return w3.dataObject[id];}
w3.dataObject[id] = template;
return template;
}
// Replace each {{a || b || "literal"}} placeholder in the node's innerHTML
// (or .value when typ == "attribute"), resolving against the current repeat
// item x, then data, then alias.field lookups, then quoted literals.
function w3_replace_curly(elmnt, typ, repeatX, x) {
var value, rowClone, pos1, pos2, originalHTML, lookFor, lookForARR = [], i, cc, r;
rowClone = elmnt.cloneNode(true);
pos1 = 0;
while (pos1 > -1) {
originalHTML = (typ == "attribute") ? rowClone.value : rowClone.innerHTML;
pos1 = originalHTML.indexOf("{{", pos1);
if (pos1 === -1) {break;}
pos2 = originalHTML.indexOf("}}", pos1 + 1);
lookFor = originalHTML.substring(pos1 + 2, pos2);
lookForARR = lookFor.split("||");
value = undefined;
for (i = 0; i < lookForARR.length; i += 1) {
lookForARR[i] = lookForARR[i].replace(/^\s+|\s+$/gm, ''); //trim
if (x) {value = x[lookForARR[i]];}
if (value == undefined && data) {value = data[lookForARR[i]];}
if (value == undefined) {
cc = lookForARR[i].split(".");
if (cc[0] == repeatX) {value = x[cc[1]]; }
}
if (value == undefined) {
if (lookForARR[i] == repeatX) {value = x;}
}
if (value == undefined) {
// Quoted fallbacks: {{field || "default"}}.
if (lookForARR[i].substr(0, 1) == '"') {
value = lookForARR[i].replace(/"/g, "");
} else if (lookForARR[i].substr(0,1) == "'") {
value = lookForARR[i].replace(/'/g, "");
}
}
if (value != undefined) {break;}
}
if (value != undefined) {
r = "{{" + lookFor + "}}";
if (typ == "attribute") {
rowClone.value = rowClone.value.replace(r, value);
} else {
w3_replace_html(rowClone, r, value);
}
}
pos1 = pos1 + 1;
}
return rowClone;
}
// Replace placeholder r with result in a's attributes and innerHTML.
function w3_replace_html(a, r, result) {
var b, l, i, a, x, j;
if (a.hasAttributes()) {
b = a.attributes;
l = b.length;
for (i = 0; i < l; i += 1) {
if (b[i].value.indexOf(r) > -1) {b[i].value = b[i].value.replace(r, result);}
}
}
x = a.getElementsByTagName("*");
l = x.length;
a.innerHTML = a.innerHTML.replace(r, result);
}
};

View File

@@ -1,46 +0,0 @@
# Ignore documentation placeholders and generic example domains
^https?://([a-zA-Z0-9-]+\.)?example\.com(:\d+)?(/.*)?$
^http://example\.net
# Shields.io badges often trigger rate limits or intermittent 503s
^https://img\.shields\.io/.*
# PDF files are ignored as lychee cannot reliably parse internal PDF links
\.pdf$
# Standard mailto: protocol is not a web URL
^mailto:
# Ignore local development endpoints that won't resolve in CI/CD environments
^https?://(127\.0\.0\.1|localhost)(:\d+)?(/.*)?$
# Placeholder for Google Cloud Run service discovery
https://cloud-run-url.app/
# DGraph Cloud and private instance endpoints
https://xxx.cloud.dgraph.io/
https://cloud.dgraph.io/login
https://dgraph.io/docs
https://play.dgraph.io/
# MySQL Community downloads and main site (often protected by bot mitigation)
^https?://(.*\.)?mysql\.com/.*
# Claude desktop download link
https://claude.ai/download
# Google Cloud Run product page
https://cloud.google.com/run/.*
https://console.cloud.google.com/.*
# These specific deep links are known to cause redirect loops or 403s in automated scrapers
https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
^https?://(www\.)?npmjs\.com/.*
https://www.oceanbase.com/
# Ignore social media and blog profiles to reduce external request overhead
https://medium.com/@mcp_toolbox

View File

@@ -1 +0,0 @@
GEMINI.md

View File

@@ -1,544 +1,5 @@
# Changelog
## [0.31.0](https://github.com/googleapis/genai-toolbox/compare/v0.30.0...v0.31.0) (2026-03-26)
### ⚠ BREAKING CHANGES
* release upgraded docsite ([#2831](https://github.com/googleapis/genai-toolbox/issues/2831))
* **http:** sanitize non-2xx error output ([#2654](https://github.com/googleapis/genai-toolbox/issues/2654))
* add a new `enable-api` flag ([#2846](https://github.com/googleapis/genai-toolbox/issues/2846))
* remove deprecations and update tools-file flag ([#2806](https://github.com/googleapis/genai-toolbox/issues/2806))
### Features
* Add a new `enable-api` flag ([#2846](https://github.com/googleapis/genai-toolbox/issues/2846)) ([7a070da](https://github.com/googleapis/genai-toolbox/commit/7a070dae4f1833671649ea605f36659675d402a9))
* **auth:** Add generic `authService` type for MCP ([#2619](https://github.com/googleapis/genai-toolbox/issues/2619)) ([f6678f8](https://github.com/googleapis/genai-toolbox/commit/f6678f8e29aa3346f4f73ce33cec37b4753d6947))
* **auth:** Add Protected Resource Metadata endpoint ([#2698](https://github.com/googleapis/genai-toolbox/issues/2698)) ([b53dcf2](https://github.com/googleapis/genai-toolbox/commit/b53dcf20694599f8b961c501a532bd122630b6f4))
* **auth:** Support manual PRM override ([#2717](https://github.com/googleapis/genai-toolbox/issues/2717)) ([283e4e3](https://github.com/googleapis/genai-toolbox/commit/283e4e33172571e4b20fa6a3ea0cfc632a565e6a))
* **dataplex:** Add support for lookup context tool. ([#2744](https://github.com/googleapis/genai-toolbox/issues/2744)) ([facb69d](https://github.com/googleapis/genai-toolbox/commit/facb69d01fe0c7ff9e2e1c40804dd00762e508a6))
* Remove deprecations and update tools-file flag ([#2806](https://github.com/googleapis/genai-toolbox/issues/2806)) ([ab64c95](https://github.com/googleapis/genai-toolbox/commit/ab64c9514a467d92a4547eda5a4ecdd08f86b0c9))
### Bug Fixes
* **ci:** Remove search index generation from preview deployment workflow ([#2859](https://github.com/googleapis/genai-toolbox/issues/2859)) ([f8891b8](https://github.com/googleapis/genai-toolbox/commit/f8891b82fcaaef240e1031cd9f784749d91d4210))
* **docs:** Skip empty folders in pagination & reduce PR comment noise ([#2853](https://github.com/googleapis/genai-toolbox/issues/2853)) ([9ebd93a](https://github.com/googleapis/genai-toolbox/commit/9ebd93a8ecb9bae673aa77a859803629fc7a4e1d))
* **http:** Sanitize non-2xx error output ([#2654](https://github.com/googleapis/genai-toolbox/issues/2654)) ([5bef954](https://github.com/googleapis/genai-toolbox/commit/5bef954507c8e23b6c9b0eb2551265e4be32b452))
* **skills:** Fix integer parameter parsing through agent skills ([#2847](https://github.com/googleapis/genai-toolbox/issues/2847)) ([4564efe](https://github.com/googleapis/genai-toolbox/commit/4564efe75436b4081d9f3d1f7c912bc64c13f850))
### Documentation
* Release upgraded docsite ([#2831](https://github.com/googleapis/genai-toolbox/issues/2831)) ([5b25ce0](https://github.com/googleapis/genai-toolbox/commit/5b25ce081235b21c884e27057cd4a2fa4d0d7c0e))
## [0.30.0](https://github.com/googleapis/genai-toolbox/compare/v0.29.0...v0.30.0) (2026-03-20)
### Features
* **cli:** Add migrate subcommand ([#2679](https://github.com/googleapis/genai-toolbox/issues/2679)) ([12171f7](https://github.com/googleapis/genai-toolbox/commit/12171f7a02bcd34ce647db10abdb79bb2dac7ace))
* **cli:** Add serve subcommand ([#2550](https://github.com/googleapis/genai-toolbox/issues/2550)) ([1e2c7c7](https://github.com/googleapis/genai-toolbox/commit/1e2c7c7804c67bebf5e2ee9b67c6feb6f05292fd))
* **skill:** One skill per toolset ([#2733](https://github.com/googleapis/genai-toolbox/issues/2733)) ([5b85c65](https://github.com/googleapis/genai-toolbox/commit/5b85c65960dba9bfaf4cadca6d44532a153976e1))
* **source/oracledb:** Add Oracle DB for MCP tools and configurations, updated tools and documentation ([#2625](https://github.com/googleapis/genai-toolbox/issues/2625)) ([e350fc7](https://github.com/googleapis/genai-toolbox/commit/e350fc7879182aaf592a70c3509ed061164b3913))
* **tools/looker:** Support git_branch tools for looker. ([#2718](https://github.com/googleapis/genai-toolbox/issues/2718)) ([70ed8a0](https://github.com/googleapis/genai-toolbox/commit/70ed8a0dcb8e654b748a6e3e1c5ef283c26006da))
* **tools/dataplex-search-entries:** Add `scope` support to search_entries tool ([#2740](https://github.com/googleapis/genai-toolbox/issues/2740)) ([10af468](https://github.com/googleapis/genai-toolbox/commit/10af4682ccd51070463604124293968944d05017))
### Bug Fixes
* **cloudloggingadmin:** Increase log ingesting time and add auth test ([#2772](https://github.com/googleapis/genai-toolbox/issues/2772)) ([50b4457](https://github.com/googleapis/genai-toolbox/commit/50b4457095ec4ac881b3b12719da24d35141f65d))
* **oracle:** Normalize encoded proxy usernames in go-ora DSN ([#2469](https://github.com/googleapis/genai-toolbox/issues/2469)) ([b1333cd](https://github.com/googleapis/genai-toolbox/commit/b1333cd27117655f8ab09f222721e14bea74b487))
* **postgres:** Update execute-sql tool to avoid multi-statements parameter ([#2707](https://github.com/googleapis/genai-toolbox/issues/2707)) ([58bc772](https://github.com/googleapis/genai-toolbox/commit/58bc772f882f0d9e00f403e73fbec812dd8a03ac))
* **skills:** Improve flag validation and silence unit test output ([#2759](https://github.com/googleapis/genai-toolbox/issues/2759)) ([f3da6aa](https://github.com/googleapis/genai-toolbox/commit/f3da6aa5e23b609a1ac9ecc098bccea02f2388ab))
* **test:** Address flaky healthcare integration test run ([#2742](https://github.com/googleapis/genai-toolbox/issues/2742)) ([9590821](https://github.com/googleapis/genai-toolbox/commit/9590821bc7d86c5cbacd29b21d4f85b427a87db4))
### Reverts
* **ci:** Implement conditional sharding logic in integration tests ([#2763](https://github.com/googleapis/genai-toolbox/issues/2763)) ([1528d7c](https://github.com/googleapis/genai-toolbox/commit/1528d7c38dfaa30bdecbe59c79ba926fa6d18356))
## [0.29.0](https://github.com/googleapis/genai-toolbox/compare/v0.28.0...v0.29.0) (2026-03-13)
### ⚠ BREAKING CHANGES
* **source/alloydb:** restructure prebuilt toolsets ([#2639](https://github.com/googleapis/genai-toolbox/issues/2639))
* **source/spanner:** restructure prebuilt toolsets ([#2641](https://github.com/googleapis/genai-toolbox/issues/2641))
* **source/dataplex:** restructure prebuilt toolsets ([#2640](https://github.com/googleapis/genai-toolbox/issues/2640))
* **source/oss-db:** restructure prebuilt toolsets ([#2638](https://github.com/googleapis/genai-toolbox/issues/2638))
* **source/cloudsql:** restructure prebuilt toolsets ([#2635](https://github.com/googleapis/genai-toolbox/issues/2635))
* **source/bigquery:** restructure prebuilt toolsets ([#2637](https://github.com/googleapis/genai-toolbox/issues/2637))
* **source/firestore:** restructure prebuilt toolsets ([#2636](https://github.com/googleapis/genai-toolbox/issues/2636))
* telemetry metrics updates as per semantic convention ([#2566](https://github.com/googleapis/genai-toolbox/issues/2566))
### Features
* Add user agent to embeddings generation ([#2572](https://github.com/googleapis/genai-toolbox/issues/2572)) ([287251a](https://github.com/googleapis/genai-toolbox/commit/287251a0cfed4d24617e5d0d957026a94f65d820))
* **skill:** Attach user agent metadata for generated skill ([#2697](https://github.com/googleapis/genai-toolbox/issues/2697)) ([9598a6a](https://github.com/googleapis/genai-toolbox/commit/9598a6a32597b9c9abdb0f20c06d86a01b0d011f))
* **skills:** Add additional-notes flag to generate skills command ([#2696](https://github.com/googleapis/genai-toolbox/issues/2696)) ([73bf962](https://github.com/googleapis/genai-toolbox/commit/73bf962459b76872f748248bb5e289be232a30b6))
* **skill:** Update skill generation logic ([#2646](https://github.com/googleapis/genai-toolbox/issues/2646)) ([c233eee](https://github.com/googleapis/genai-toolbox/commit/c233eee98cd9621526cb286245f3874f5bd6e7da))
* **source/alloydb:** Restructure prebuilt toolsets ([#2639](https://github.com/googleapis/genai-toolbox/issues/2639)) ([5f3f063](https://github.com/googleapis/genai-toolbox/commit/5f3f063fc7335e47e35fa1a4f93616abbd7959d5))
* **source/bigquery:** Restructure prebuilt toolsets ([#2637](https://github.com/googleapis/genai-toolbox/issues/2637)) ([dc984ba](https://github.com/googleapis/genai-toolbox/commit/dc984badd79f54ff423713a763648c6a6880a640))
* **sources/bigquery:** Support custom oauth header name ([#2564](https://github.com/googleapis/genai-toolbox/issues/2564)) ([d3baf77](https://github.com/googleapis/genai-toolbox/commit/d3baf77d61ab30d97edc93587e6f0365b8523fee))
* **source/cloudsql:** Restructure prebuilt toolsets ([#2635](https://github.com/googleapis/genai-toolbox/issues/2635)) ([99613dc](https://github.com/googleapis/genai-toolbox/commit/99613dcc7a06bd3a2324d20e1ef41404cf6fd9d5))
* **source/dataplex:** Restructure prebuilt toolsets ([#2640](https://github.com/googleapis/genai-toolbox/issues/2640)) ([acb9a80](https://github.com/googleapis/genai-toolbox/commit/acb9a80cf2438e04c76cf10267b1c9ca9227da0b))
* **source/firestore:** Restructure prebuilt toolsets ([#2636](https://github.com/googleapis/genai-toolbox/issues/2636)) ([22ab7b9](https://github.com/googleapis/genai-toolbox/commit/22ab7b9365eab21bfa04da64574fadbd0746f669))
* **source/oss-db:** Restructure prebuilt toolsets ([#2638](https://github.com/googleapis/genai-toolbox/issues/2638)) ([5196c6a](https://github.com/googleapis/genai-toolbox/commit/5196c6a78eb256ec83d847385c69bfebece48c87))
* **source/spanner:** Restructure prebuilt toolsets ([#2641](https://github.com/googleapis/genai-toolbox/issues/2641)) ([ea2b698](https://github.com/googleapis/genai-toolbox/commit/ea2b698b03517c400bbaef27f56c4d3abead8b2c))
* Telemetry metrics updates as per semantic convention ([#2566](https://github.com/googleapis/genai-toolbox/issues/2566)) ([131d764](https://github.com/googleapis/genai-toolbox/commit/131d764f895c14908e29914b3c0c273d91a2654f))
* **tools/mongodb:** Add tool annotations to MongoDB tools for improved LLM understanding ([#2219](https://github.com/googleapis/genai-toolbox/issues/2219)) ([b7a5f80](https://github.com/googleapis/genai-toolbox/commit/b7a5f80b42b3c1564870e2868aeab87d82a85d39))
* **tools/serverless-spark:** Add get_session_template tool ([#2308](https://github.com/googleapis/genai-toolbox/issues/2308)) ([a136e16](https://github.com/googleapis/genai-toolbox/commit/a136e169b3551a14b081624d7f50e1c32f0fb857))
* **tools/serverless-spark:** Add list/get sessions tools ([#2576](https://github.com/googleapis/genai-toolbox/issues/2576)) ([a554298](https://github.com/googleapis/genai-toolbox/commit/a554298535444671228fc08f6e3139d199a8b6b4))
### Bug Fixes
* Improve list locks integration test for postgres ([#2279](https://github.com/googleapis/genai-toolbox/issues/2279)) ([d9ebe5d](https://github.com/googleapis/genai-toolbox/commit/d9ebe5d4bf1b7ca04cae47386b36c38ca5b77b8a))
* **mcp:** Guard nil SSE session lookup and return 400 for missing session ([#2681](https://github.com/googleapis/genai-toolbox/issues/2681)) ([f66189f](https://github.com/googleapis/genai-toolbox/commit/f66189fe43cb711da3a041fa31edbacd7bbc7153))
* **oracle:** Update oracle-execute-sql tool interface to match source signature ([#2627](https://github.com/googleapis/genai-toolbox/issues/2627)) ([81699a3](https://github.com/googleapis/genai-toolbox/commit/81699a375b7e5af37945f4124aa4c5f2a1a9f7a6))
* Return AllParams for GetParameter() for tools with templateParameter([#2734](https://github.com/googleapis/genai-toolbox/issues/2734)) ([bfd7ba6](https://github.com/googleapis/genai-toolbox/commit/bfd7ba601a52294fa71623d88af71bd0288bf887))
* **server/mcp:** Scope defer span.End inside loop iteration ([#2558](https://github.com/googleapis/genai-toolbox/issues/2558)) ([c88a62d](https://github.com/googleapis/genai-toolbox/commit/c88a62dcf4c16118ae706cc43d18cad827e7496d)), closes [#2549](https://github.com/googleapis/genai-toolbox/issues/2549)
* **skill:** Fix env variable propagation ([#2645](https://github.com/googleapis/genai-toolbox/issues/2645)) ([5271368](https://github.com/googleapis/genai-toolbox/commit/52713687208994c423da64333cb0a04fb483f794))
* **sources/looker:** Looker and looker-dev prebuilt tools should share one source definition. ([#2620](https://github.com/googleapis/genai-toolbox/issues/2620)) ([df7f2fd](https://github.com/googleapis/genai-toolbox/commit/df7f2fd7d5b75211dbbbd471c84f0ec5097ca7ad))
* **telemetry:** Histogram buckets from OTel standard to MCP standards ([#2729](https://github.com/googleapis/genai-toolbox/issues/2729)) ([87cd4a0](https://github.com/googleapis/genai-toolbox/commit/87cd4a0bf48605225ef25ca554cc787def976d11))
* **ui:** Remove module from script ([#2703](https://github.com/googleapis/genai-toolbox/issues/2703)) ([6943ab6](https://github.com/googleapis/genai-toolbox/commit/6943ab6839d21da7b6a4241700c7891c6f4a9b2c))
* Update toolset attributes naming ([#2554](https://github.com/googleapis/genai-toolbox/issues/2554)) ([3d6ae4e](https://github.com/googleapis/genai-toolbox/commit/3d6ae4eeaf5acfbde83374a244573edd8fc9012b))
## [0.28.0](https://github.com/googleapis/genai-toolbox/compare/v0.27.0...v0.28.0) (2026-03-02)
### Features
* Add polling system to dynamic reloading ([#2466](https://github.com/googleapis/genai-toolbox/issues/2466)) ([fcaac9b](https://github.com/googleapis/genai-toolbox/commit/fcaac9bb957226ee3db1baea24330f337ba88ab7))
* Added basic template for sdks doc migrate ([#1961](https://github.com/googleapis/genai-toolbox/issues/1961)) ([87f2eaf](https://github.com/googleapis/genai-toolbox/commit/87f2eaf79cdecca7b939151e1543eccf2f812a69))
* **dataproc:** Add dataproc source and list/get clusters/jobs tools ([#2407](https://github.com/googleapis/genai-toolbox/issues/2407)) ([cc05e57](https://github.com/googleapis/genai-toolbox/commit/cc05e5745d1c25a6088702b827cd098250164b7e))
* **sources/postgres:** Add configurable pgx query execution mode ([#2477](https://github.com/googleapis/genai-toolbox/issues/2477)) ([57b77bc](https://github.com/googleapis/genai-toolbox/commit/57b77bca09ce6ee260bd64af9be5fcef593e9acb))
* **sources/redis:** Add TLS support for Redis connections ([#2432](https://github.com/googleapis/genai-toolbox/issues/2432)) ([d6af290](https://github.com/googleapis/genai-toolbox/commit/d6af2907fd2dca5a6751d7d42090dd7ebb8ccd48))
* **tools/looker:** Enable Get All Lookml Tests, Run LookML Tests, and Create View From Table tools for Looker ([#2522](https://github.com/googleapis/genai-toolbox/issues/2522)) ([e01139a](https://github.com/googleapis/genai-toolbox/commit/e01139a90268f8587b9823be1157259c1bcbfd66))
* **tools/looker:** Tools to list/create/delete directories ([#2488](https://github.com/googleapis/genai-toolbox/issues/2488)) ([0036d8c](https://github.com/googleapis/genai-toolbox/commit/0036d8c35844c3de2079cb5b2479781e8938525b))
* **ui:** Make tool list panel resizable ([#2253](https://github.com/googleapis/genai-toolbox/issues/2253)) ([276cf60](https://github.com/googleapis/genai-toolbox/commit/276cf604a2bb41861639ed6881557e38dd97a614))
### Bug Fixes
* **ci:** Add path for forked PR unit test runs ([#2540](https://github.com/googleapis/genai-toolbox/issues/2540)) ([04dd2a7](https://github.com/googleapis/genai-toolbox/commit/04dd2a77603c7babf01da724dfb77808e3f25fe5))
* Deflake alloydb omni ([#2431](https://github.com/googleapis/genai-toolbox/issues/2431)) ([62b8309](https://github.com/googleapis/genai-toolbox/commit/62b830987d65c3573214d04e50742476097ee9e9))
* **docs/adk:** Resolve dependency duplication ([#2418](https://github.com/googleapis/genai-toolbox/issues/2418)) ([4d44abb](https://github.com/googleapis/genai-toolbox/commit/4d44abb4638926ca50b0fa4dcf10a03e7fab657f))
* **docs/langchain:** Fix core at 0.3.0 and align compatible dependencies ([#2426](https://github.com/googleapis/genai-toolbox/issues/2426)) ([36edfd3](https://github.com/googleapis/genai-toolbox/commit/36edfd3d506e839c092d04cbca1799b5ebc38160))
* Enforce required validation for explicit null parameter values ([#2519](https://github.com/googleapis/genai-toolbox/issues/2519)) ([d5e9512](https://github.com/googleapis/genai-toolbox/commit/d5e9512a237e658f9b9127fdd8c174ec023c3310))
* **oracle:** Enable DML operations and resolve incorrect array type error ([#2323](https://github.com/googleapis/genai-toolbox/issues/2323)) ([72146a4](https://github.com/googleapis/genai-toolbox/commit/72146a4b1605bcdd3e1038106bfb1f899e677e39))
* **server/mcp:** Guard nil dereference in sseManager.get ([#2557](https://github.com/googleapis/genai-toolbox/issues/2557)) ([e534196](https://github.com/googleapis/genai-toolbox/commit/e534196303c2b8d9b6e599ac25add337e6fc9b8f)), closes [#2548](https://github.com/googleapis/genai-toolbox/issues/2548)
* **tests/postgres:** Implement uuid-based isolation and reliable resource cleanup ([#2377](https://github.com/googleapis/genai-toolbox/issues/2377)) ([8a96fb1](https://github.com/googleapis/genai-toolbox/commit/8a96fb1a8874baa3688e566f3dea8a0912fcf2df))
* **tests/postgres:** Restore list_schemas test and implement dynamic owner ([#2521](https://github.com/googleapis/genai-toolbox/issues/2521)) ([7041e79](https://github.com/googleapis/genai-toolbox/commit/7041e797347f337d6f7f44ca051ae31acd58babe))
* **tests:** Resolve LlamaIndex dependency conflict in JS quickstart ([#2597](https://github.com/googleapis/genai-toolbox/issues/2597)) ([ac11f5a](https://github.com/googleapis/genai-toolbox/commit/ac11f5af9c7bcf228d667e1b8e08b5dc49ad91a0))
## [0.27.0](https://github.com/googleapis/genai-toolbox/compare/v0.26.0...v0.27.0) (2026-02-12)
### ⚠ BREAKING CHANGES
* Update configuration file v2 ([#2369](https://github.com/googleapis/genai-toolbox/issues/2369))([293c1d6](https://github.com/googleapis/genai-toolbox/commit/293c1d6889c39807855ba5e01d4c13ba2a4c50ce))
* Update/add detailed telemetry for mcp endpoint compliant with OTEL semantic convention ([#1987](https://github.com/googleapis/genai-toolbox/issues/1987)) ([478a0bd](https://github.com/googleapis/genai-toolbox/commit/478a0bdb59288c1213f83862f95a698b4c2c0aab))
### Features
* **cli/invoke:** Add support for direct tool invocation from CLI ([#2353](https://github.com/googleapis/genai-toolbox/issues/2353)) ([6e49ba4](https://github.com/googleapis/genai-toolbox/commit/6e49ba436ef2390c13feaf902b29f5907acffb57))
* **cli/skills:** Add support for generating agent skills from toolset ([#2392](https://github.com/googleapis/genai-toolbox/issues/2392)) ([80ef346](https://github.com/googleapis/genai-toolbox/commit/80ef34621453b77bdf6a6016c354f102a17ada04))
* **cloud-logging-admin:** Add source, tools, integration test and docs ([#2137](https://github.com/googleapis/genai-toolbox/issues/2137)) ([252fc30](https://github.com/googleapis/genai-toolbox/commit/252fc3091af10d25d8d7af7e047b5ac87a5dd041))
* **cockroachdb:** Add CockroachDB integration with cockroach-go ([#2006](https://github.com/googleapis/genai-toolbox/issues/2006)) ([1fdd99a](https://github.com/googleapis/genai-toolbox/commit/1fdd99a9b609a5e906acce414226ff44d75d5975))
* **prebuiltconfigs/alloydb-omni:** Implement Alloydb omni dataplane tools ([#2340](https://github.com/googleapis/genai-toolbox/issues/2340)) ([e995349](https://github.com/googleapis/genai-toolbox/commit/e995349ea0756c700d188b8f04e9459121219f0c))
* **server:** Add Tool call error categories ([#2387](https://github.com/googleapis/genai-toolbox/issues/2387)) ([32cb4db](https://github.com/googleapis/genai-toolbox/commit/32cb4db712d27579c1bf29e61cbd0bed02286c28))
* **tools/looker:** support `looker-validate-project` tool ([#2430](https://github.com/googleapis/genai-toolbox/issues/2430)) ([a15a128](https://github.com/googleapis/genai-toolbox/commit/a15a12873f936b0102aeb9500cc3bcd71bb38c34))
### Bug Fixes
* **dataplex:** Capture GCP HTTP errors in MCP Toolbox ([#2347](https://github.com/googleapis/genai-toolbox/issues/2347)) ([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15))
* **sources/cockroachdb:** Update kind to type ([#2465](https://github.com/googleapis/genai-toolbox/issues/2465)) ([2d341ac](https://github.com/googleapis/genai-toolbox/commit/2d341acaa61c3c1fe908fceee8afbd90fb646d3a))
* Surface Dataplex API errors in MCP results ([#2347](https://github.com/googleapis/genai-toolbox/pull/2347))([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15))
## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
### ⚠ BREAKING CHANGES
* Validate tool naming ([#2305](https://github.com/googleapis/genai-toolbox/issues/2305)) ([5054212](https://github.com/googleapis/genai-toolbox/commit/5054212fa43017207fe83275d27b9fbab96e8ab5))
* **tools/cloudgda:** Update description and parameter name for cloudgda tool ([#2288](https://github.com/googleapis/genai-toolbox/issues/2288)) ([6b02591](https://github.com/googleapis/genai-toolbox/commit/6b025917032394a66840488259db8ff2c3063016))
### Features
* Add new `user-agent-metadata` flag ([#2302](https://github.com/googleapis/genai-toolbox/issues/2302)) ([adc9589](https://github.com/googleapis/genai-toolbox/commit/adc9589766904d9e3cbe0a6399222f8d4bb9d0cc))
* Add remaining flag to Toolbox server in MCP registry ([#2272](https://github.com/googleapis/genai-toolbox/issues/2272)) ([5e0999e](https://github.com/googleapis/genai-toolbox/commit/5e0999ebf5cdd9046e96857738254b2e0561b6d2))
* **embeddingModel:** Add embedding model to MCP handler ([#2310](https://github.com/googleapis/genai-toolbox/issues/2310)) ([e4f60e5](https://github.com/googleapis/genai-toolbox/commit/e4f60e56335b755ef55b9553d3f40b31858ec8d9))
* **sources/bigquery:** Make maximum rows returned from queries configurable ([#2262](https://github.com/googleapis/genai-toolbox/issues/2262)) ([4abf0c3](https://github.com/googleapis/genai-toolbox/commit/4abf0c39e717d53b22cc61efb65e09928c598236))
* **prebuilt/cloud-sql:** Add create backup tool for Cloud SQL ([#2141](https://github.com/googleapis/genai-toolbox/issues/2141)) ([8e0fb03](https://github.com/googleapis/genai-toolbox/commit/8e0fb0348315a80f63cb47b3c7204869482448f4))
* **prebuilt/cloud-sql:** Add restore backup tool for Cloud SQL ([#2171](https://github.com/googleapis/genai-toolbox/issues/2171)) ([00c3e6d](https://github.com/googleapis/genai-toolbox/commit/00c3e6d8cba54e2ab6cb271c7e6b378895df53e1))
* Support combining multiple prebuilt configurations ([#2295](https://github.com/googleapis/genai-toolbox/issues/2295)) ([e535b37](https://github.com/googleapis/genai-toolbox/commit/e535b372ea81864d644a67135a1b07e4e519b4b4))
* Support MCP specs version 2025-11-25 ([#2303](https://github.com/googleapis/genai-toolbox/issues/2303)) ([4d23a3b](https://github.com/googleapis/genai-toolbox/commit/4d23a3bbf2797b1f7fe328aeb5789e778121da23))
* **tools:** Add `valueFromParam` support to Tool config ([#2333](https://github.com/googleapis/genai-toolbox/issues/2333)) ([15101b1](https://github.com/googleapis/genai-toolbox/commit/15101b1edbe2b85a4a5f9f819c23cf83138f4ee1))
### Bug Fixes
* **tools/cloudhealthcare:** Add check for client authorization before retrieving token string ([#2327](https://github.com/googleapis/genai-toolbox/issues/2327)) ([c25a233](https://github.com/googleapis/genai-toolbox/commit/c25a2330fea2ac382a398842c9e572e4e19bcb08))
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
### Features
* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
* **tools/postgressql:** Add tool to list stored procedures ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))
### Bug Fixes
* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))
## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
### Features
* **sources/cloud-gemini-data-analytics:** Add the Gemini Data Analytics (GDA) integration for DB NL2SQL conversion to Toolbox ([#2181](https://github.com/googleapis/genai-toolbox/issues/2181)) ([aa270b2](https://github.com/googleapis/genai-toolbox/commit/aa270b2630da2e3d618db804ca95550445367dbc))
* **source/cloudsqlmysql:** Add support for IAM authentication in Cloud SQL MySQL source ([#2050](https://github.com/googleapis/genai-toolbox/issues/2050)) ([af3d3c5](https://github.com/googleapis/genai-toolbox/commit/af3d3c52044bea17781b89ce4ab71ff0f874ac20))
* **sources/oracle:** Add Oracle OCI and Wallet support ([#1945](https://github.com/googleapis/genai-toolbox/issues/1945)) ([8ea39ec](https://github.com/googleapis/genai-toolbox/commit/8ea39ec32fbbaa97939c626fec8c5d86040ed464))
* Support combining prebuilt and custom tool configurations ([#2188](https://github.com/googleapis/genai-toolbox/issues/2188)) ([5788605](https://github.com/googleapis/genai-toolbox/commit/57886058188aa5d2a51d5846a98bc6d8a650edd1))
* **tools/mysql-get-query-plan:** Add new `mysql-get-query-plan` tool for MySQL source ([#2123](https://github.com/googleapis/genai-toolbox/issues/2123)) ([0641da0](https://github.com/googleapis/genai-toolbox/commit/0641da0353857317113b2169e547ca69603ddfde))
### Bug Fixes
* **spanner:** Move list graphs validation to runtime ([#2154](https://github.com/googleapis/genai-toolbox/issues/2154)) ([914b3ee](https://github.com/googleapis/genai-toolbox/commit/914b3eefda40a650efe552d245369e007277dab5))
## [0.23.0](https://github.com/googleapis/genai-toolbox/compare/v0.22.0...v0.23.0) (2025-12-11)
### ⚠ BREAKING CHANGES
* **serverless-spark:** add URLs to create batch tool outputs
* **serverless-spark:** add URLs to list_batches output
* **serverless-spark:** add Cloud Console and Logging URLs to get_batch
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033))
### Features
* **tools/postgres:** Add list-table-stats-tool to list table statistics. ([#2055](https://github.com/googleapis/genai-toolbox/issues/2055)) ([78b02f0](https://github.com/googleapis/genai-toolbox/commit/78b02f08c3cc3062943bb2f91cf60d5149c8d28d))
* **looker/tools:** Enhance dashboard creation with dashboard filters ([#2133](https://github.com/googleapis/genai-toolbox/issues/2133)) ([285aa46](https://github.com/googleapis/genai-toolbox/commit/285aa46b887d9acb2da8766e107bbf1ab75b8812))
* **serverless-spark:** Add Cloud Console and Logging URLs to get_batch ([e29c061](https://github.com/googleapis/genai-toolbox/commit/e29c0616d6b9ecda2badcaf7b69614e511ac031b))
* **serverless-spark:** Add URLs to create batch tool outputs ([c6ccf4b](https://github.com/googleapis/genai-toolbox/commit/c6ccf4bd87026484143a2d0f5527b2edab03b54a))
* **serverless-spark:** Add URLs to list_batches output ([5605eab](https://github.com/googleapis/genai-toolbox/commit/5605eabd696696ade07f52431a28ef65c0fb1f77))
* **sources/mariadb:** Add MariaDB source and MySQL tools integration ([#1908](https://github.com/googleapis/genai-toolbox/issues/1908)) ([3b40fea](https://github.com/googleapis/genai-toolbox/commit/3b40fea25edae607e02c1e8fc2b0c957fa2c8e9a))
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033)) ([489117d](https://github.com/googleapis/genai-toolbox/commit/489117d74711ac9260e7547163ca463eb45eeaa2))
* **tools/postgres:** Add list_pg_settings, list_database_stats tools for postgres ([#2030](https://github.com/googleapis/genai-toolbox/issues/2030)) ([32367a4](https://github.com/googleapis/genai-toolbox/commit/32367a472fae9653fed7f126428eba0252978bd5))
* **tools/postgres:** Add new postgres-list-roles tool ([#2038](https://github.com/googleapis/genai-toolbox/issues/2038)) ([bea9705](https://github.com/googleapis/genai-toolbox/commit/bea97054502cfa236aa10e2ebc8ff58eb00ad035))
### Bug Fixes
* List tables tools null fix ([#2107](https://github.com/googleapis/genai-toolbox/issues/2107)) ([2b45266](https://github.com/googleapis/genai-toolbox/commit/2b452665983154041d4cd0ed7d82532e4af682eb))
* **tools/mongodb:** Removed sortPayload and sortParams ([#1238](https://github.com/googleapis/genai-toolbox/issues/1238)) ([c5a6daa](https://github.com/googleapis/genai-toolbox/commit/c5a6daa7683d2f9be654300d977692c368e55e31))
### Miscellaneous Chores
* **looker:** Upgrade to latest go sdk ([#2159](https://github.com/googleapis/genai-toolbox/issues/2159)) ([78e015d](https://github.com/googleapis/genai-toolbox/commit/78e015d7dfd9cce7e2b444ed934da17eb355bc86))
## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)
### Features
* **tools/postgres:** Add allowed-origins flag ([#1984](https://github.com/googleapis/genai-toolbox/issues/1984)) ([862868f](https://github.com/googleapis/genai-toolbox/commit/862868f28476ea981575ce412faa7d6a03138f31))
* **tools/postgres:** Add list-query-stats and get-column-cardinality functions ([#1976](https://github.com/googleapis/genai-toolbox/issues/1976)) ([9f76026](https://github.com/googleapis/genai-toolbox/commit/9f760269253a8cc92a357e995c6993ccc4a0fb7b))
* **tools/spanner:** Add spanner list graphs to prebuiltconfigs ([#2056](https://github.com/googleapis/genai-toolbox/issues/2056)) ([0e7fbf4](https://github.com/googleapis/genai-toolbox/commit/0e7fbf465c488397aa9d8cab2e55165fff4eb53c))
* **prebuilt/cloud-sql:** Add clone instance tool for cloud sql ([#1845](https://github.com/googleapis/genai-toolbox/issues/1845)) ([5e43630](https://github.com/googleapis/genai-toolbox/commit/5e43630907aa2d7bc6818142483a33272eab060b))
* **serverless-spark:** Add create_pyspark_batch tool ([1bf0b51](https://github.com/googleapis/genai-toolbox/commit/1bf0b51f033c956790be1577bf5310d0b17e9c12))
* **serverless-spark:** Add create_spark_batch tool ([17a9792](https://github.com/googleapis/genai-toolbox/commit/17a979207dbc4fe70acd0ebda164d1a8d34c1ed3))
* Support alternate accessToken header name ([#1968](https://github.com/googleapis/genai-toolbox/issues/1968)) ([18017d6](https://github.com/googleapis/genai-toolbox/commit/18017d6545335a6fc1c472617101c35254d9a597))
* Support for annotations ([#2007](https://github.com/googleapis/genai-toolbox/issues/2007)) ([ac21335](https://github.com/googleapis/genai-toolbox/commit/ac21335f4e88ca52d954d7f8143a551a35661b94))
* **tool/mssql:** Set default host and port for MSSQL source ([#1943](https://github.com/googleapis/genai-toolbox/issues/1943)) ([7a9cc63](https://github.com/googleapis/genai-toolbox/commit/7a9cc633768d9ae9a7ff8230002da69d6a36ca86))
* **tools/cloudsqlpg:** Add CloudSQL PostgreSQL pre-check tool ([#1722](https://github.com/googleapis/genai-toolbox/issues/1722)) ([8752e05](https://github.com/googleapis/genai-toolbox/commit/8752e05ab6e98812d95673a6f1ff67e9a6ae48d2))
* **tools/postgres-list-publication-tables:** Add new postgres-list-publication-tables tool ([#1919](https://github.com/googleapis/genai-toolbox/issues/1919)) ([f4b1f0a](https://github.com/googleapis/genai-toolbox/commit/f4b1f0a68000ca2fc0325f55a1905705417c38a2))
* **tools/postgres-list-tablespaces:** Add new postgres-list-tablespaces tool ([#1934](https://github.com/googleapis/genai-toolbox/issues/1934)) ([5ad7c61](https://github.com/googleapis/genai-toolbox/commit/5ad7c6127b3e47504fc4afda0b7f3de1dff78b8b))
* **tools/spanner-list-graph:** Tool impl + docs + tests ([#1923](https://github.com/googleapis/genai-toolbox/issues/1923)) ([a0f44d3](https://github.com/googleapis/genai-toolbox/commit/a0f44d34ea3f044dd08501be616f70ddfd63ab45))
### Bug Fixes
* Add import for firebirdsql ([#2045](https://github.com/googleapis/genai-toolbox/issues/2045)) ([fb7aae9](https://github.com/googleapis/genai-toolbox/commit/fb7aae9d35b760d3471d8379642f835a0d84ec41))
* Correct FAQ to mention HTTP tools ([#2036](https://github.com/googleapis/genai-toolbox/issues/2036)) ([7b44237](https://github.com/googleapis/genai-toolbox/commit/7b44237d4a21bfbf8d3cebe4d32a15affa29584d))
* Format BigQuery numeric output as decimal strings ([#2084](https://github.com/googleapis/genai-toolbox/issues/2084)) ([155bff8](https://github.com/googleapis/genai-toolbox/commit/155bff80c1da4fae1e169e425fd82e1dc3373041))
* Set default annotations for tools in code if annotation not provided in yaml ([#2049](https://github.com/googleapis/genai-toolbox/issues/2049)) ([565460c](https://github.com/googleapis/genai-toolbox/commit/565460c4ea8953dbe80070a8e469f957c0f7a70c))
* **tools/alloydb-postgres-list-tables:** Exclude google_ml schema from list_tables ([#2046](https://github.com/googleapis/genai-toolbox/issues/2046)) ([a03984c](https://github.com/googleapis/genai-toolbox/commit/a03984cc15254c928f30085f8fa509ded6a79a0c))
* **tools/alloydbcreateuser:** Remove duplication of project param ([#2028](https://github.com/googleapis/genai-toolbox/issues/2028)) ([730ac6d](https://github.com/googleapis/genai-toolbox/commit/730ac6d22805fd50b4a675b74c1865f4e7689e7c))
* **tools/mongodb:** Remove `required` tag from the `canonical` field ([#2099](https://github.com/googleapis/genai-toolbox/issues/2099)) ([744214e](https://github.com/googleapis/genai-toolbox/commit/744214e04cd12b11d166e6eb7da8ce4714904abc))
## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)
### ⚠ BREAKING CHANGES
* **tools/spanner-list-tables:** Unmarshal `object_details` json string into map to make response have nested json ([#1894](https://github.com/googleapis/genai-toolbox/issues/1894)) ([446d62a](https://github.com/googleapis/genai-toolbox/commit/446d62acd995d5128f52e9db254dd1c7138227c6))
### Features
* **tools/postgres:** Add `long_running_transactions`, `list_locks` and `replication_stats` tools ([#1751](https://github.com/googleapis/genai-toolbox/issues/1751)) ([5abad5d](https://github.com/googleapis/genai-toolbox/commit/5abad5d56c6cc5ba86adc5253b948bf8230fa830))
### Bug Fixes
* **tools/alloydbgetinstance:** Remove parameter duplication ([#1993](https://github.com/googleapis/genai-toolbox/issues/1993)) ([0e269a1](https://github.com/googleapis/genai-toolbox/commit/0e269a1d125eed16a51ead27db4398e6e48cb948))
* **tools:** Check for query execution error for pgxpool.Pool ([#1969](https://github.com/googleapis/genai-toolbox/issues/1969)) ([2bff138](https://github.com/googleapis/genai-toolbox/commit/2bff1384a3570ef46bc03ebebc507923af261987))
## [0.20.0](https://github.com/googleapis/genai-toolbox/compare/v0.19.1...v0.20.0) (2025-11-14)
### Features
* Added prompt support for toolbox ([#1798](https://github.com/googleapis/genai-toolbox/issues/1798)) ([cd56ea4](https://github.com/googleapis/genai-toolbox/commit/cd56ea44fbdd149fcb92324e70ee36ac747635db))
* **source/alloydb, source/cloud-sql-postgres,source/cloud-sql-mysql,source/cloud-sql-mssql:** Use project from env for alloydb and cloud sql control plane tools ([#1588](https://github.com/googleapis/genai-toolbox/issues/1588)) ([12bdd95](https://github.com/googleapis/genai-toolbox/commit/12bdd954597e49d3ec6b247cc104584c5a4d1943))
* **source/mysql:** Set default host and port for MySQL source ([#1922](https://github.com/googleapis/genai-toolbox/issues/1922)) ([2c228ef](https://github.com/googleapis/genai-toolbox/commit/2c228ef4f2d4cb8dfc41e845466bfe3566d141a1))
* **source/Postgresql:** Set default host and port for Postgresql source ([#1927](https://github.com/googleapis/genai-toolbox/issues/1927)) ([7e6e88a](https://github.com/googleapis/genai-toolbox/commit/7e6e88a21f2b9b60e0d645cdde33a95892d31a04))
* **tool/looker-generate-embed-url:** Adding generate embed url tool ([#1877](https://github.com/googleapis/genai-toolbox/issues/1877)) ([ef63860](https://github.com/googleapis/genai-toolbox/commit/ef63860559798fbad54c1051d9f53bce42d66464))
* **tools/postgres:** Add `list_triggers`, `database_overview` tools for postgres ([#1912](https://github.com/googleapis/genai-toolbox/issues/1912)) ([a4c9287](https://github.com/googleapis/genai-toolbox/commit/a4c9287aecf848faa98d973a9ce5b13fa309a58e))
* **tools/postgres:** Add list_indexes, list_sequences tools for postgres ([#1765](https://github.com/googleapis/genai-toolbox/issues/1765)) ([897c63d](https://github.com/googleapis/genai-toolbox/commit/897c63dcea43226262d2062088c59f2d1068fca7))
## [0.19.1](https://github.com/googleapis/genai-toolbox/compare/v0.18.0...v0.19.1) (2025-11-07)
### ⚠ BREAKING CHANGES
* **tools/alloydbainl:** update AlloyDB AI NL statement order ([#1753](https://github.com/googleapis/genai-toolbox/issues/1753))
* **tools/bigquery-analyze-contribution:** Add allowed dataset support ([#1675](https://github.com/googleapis/genai-toolbox/issues/1675)) ([ef28e39](https://github.com/googleapis/genai-toolbox/commit/ef28e39e90b21287ca8e69b99f4e792c78e9d31f))
* **tools/bigquery-get-dataset-info:** add allowed dataset support ([#1654](https://github.com/googleapis/genai-toolbox/issues/1654))
### Features
* Support `excludeValues` for parameters ([#1818](https://github.com/googleapis/genai-toolbox/issues/1818)) ([a8e98dc](https://github.com/googleapis/genai-toolbox/commit/a8e98dc99d208e8b37a3bc4d1ab4749b5154ed36))
* **elasticsearch:** Add Elasticsearch source and tools ([#1109](https://github.com/googleapis/genai-toolbox/issues/1109)) ([5367285](https://github.com/googleapis/genai-toolbox/commit/5367285e91ddda99ae7261d8ed4b025f975d1453))
* **mindsdb:** Add MindsDB Source and Tools ([#878](https://github.com/googleapis/genai-toolbox/issues/878)) ([1b2cca9](https://github.com/googleapis/genai-toolbox/commit/1b2cca9faa6f7bacbeb5d25634ce3bf61067de16))
* **cloud-healthcare:** Add support for healthcare source, tool and prebuilt config ([#1853](https://github.com/googleapis/genai-toolbox/issues/1853)) ([1f833fb](https://github.com/googleapis/genai-toolbox/commit/1f833fb1a124e23819ddfec476f2e63ef31dd22f))
* **singlestore:** Add SingleStore Source and Tools ([#1333](https://github.com/googleapis/genai-toolbox/issues/1333)) ([40b9dba](https://github.com/googleapis/genai-toolbox/commit/40b9dbab088add05a66995abb1c71a9345b8f7e5))
* **source/bigquery:** Add client cache for user-passed credentials ([#1119](https://github.com/googleapis/genai-toolbox/issues/1119)) ([cf7012a](https://github.com/googleapis/genai-toolbox/commit/cf7012a82bb5c77309da3a26e563a5015786aa69))
* **source/bigquery:** Add service account impersonation support for bigquery ([#1641](https://github.com/googleapis/genai-toolbox/issues/1641)) ([e09d182](https://github.com/googleapis/genai-toolbox/commit/e09d182f88bf697a169428f477aebc9f1741e35f))
* **tools/bigquery-analyze-contribution:** Add allowed dataset support ([#1675](https://github.com/googleapis/genai-toolbox/issues/1675)) ([ef28e39](https://github.com/googleapis/genai-toolbox/commit/ef28e39e90b21287ca8e69b99f4e792c78e9d31f))
* **tools/bigquery-get-dataset-info:** Add allowed dataset support ([#1654](https://github.com/googleapis/genai-toolbox/issues/1654)) ([a2006ad](https://github.com/googleapis/genai-toolbox/commit/a2006ad57718ebad3de5c6850e9c6a5a763808ec))
* **tools/looker-run-dashboard:** New `run_dashboard` tool ([#1858](https://github.com/googleapis/genai-toolbox/issues/1858)) ([30857c2](https://github.com/googleapis/genai-toolbox/commit/30857c2294bb14961d3be49e2c368c69ecf844ec))
* **tools/looker-run-look:** Modify run_look to show query origin ([#1860](https://github.com/googleapis/genai-toolbox/issues/1860)) ([991e539](https://github.com/googleapis/genai-toolbox/commit/991e539f9c7978fa618ada3179a0b656c33ff501))
* **tools/looker:** Tools to retrieve the connections, schemas, databases, and column metadata from a looker system. ([#1804](https://github.com/googleapis/genai-toolbox/issues/1804)) ([d7d1b03](https://github.com/googleapis/genai-toolbox/commit/d7d1b03f3b746ed748d67f67e833457d55c846ab))
* **tools/mongodb:** Make MongoDB tools' `filterParams` field optional ([#1614](https://github.com/googleapis/genai-toolbox/issues/1614)) ([208ab92](https://github.com/googleapis/genai-toolbox/commit/208ab92eb377b538a99330a415ecc18790b077b7))
* **tools/neo4j-execute-cypher:** Add dry_run parameter to validate Cypher queries ([#1769](https://github.com/googleapis/genai-toolbox/issues/1769)) ([f475da6](https://github.com/googleapis/genai-toolbox/commit/f475da63ce1b65387b503ac497eca47635452723))
* **tools/postgres-list-schemas:** Add new postgres-list-schemas tool ([#1741](https://github.com/googleapis/genai-toolbox/issues/1741)) ([1a19cac](https://github.com/googleapis/genai-toolbox/commit/1a19cac7cd89ed70291eb55e190370fe7b2c1aba))
* **tools/postgres-list-views:** Add new postgres-list-views tool ([#1709](https://github.com/googleapis/genai-toolbox/issues/1709)) ([e8c7fe0](https://github.com/googleapis/genai-toolbox/commit/e8c7fe0994fedcb9be78d461fab3c98cc6bd86b2))
* **tools/serverless-spark:** Add cancel-batch tool ([#1827](https://github.com/googleapis/genai-toolbox/pull/1827))([2881683](https://github.com/googleapis/genai-toolbox/commit/28816832265250de97d84e6ba38bf6c35e040796))
* **tools/serverless-spark:** Add get_batch tool ([#1783](https://github.com/googleapis/genai-toolbox/pull/1783))([7ad1072](https://github.com/googleapis/genai-toolbox/commit/7ad10720b4638324cd77d8aa410cbd1ccf0cc93f))
* **tools/serverless-spark:** Add serverless-spark source with list_batches tool ([#1690](https://github.com/googleapis/genai-toolbox/pull/1690))([816dbce](https://github.com/googleapis/genai-toolbox/commit/816dbce268392046e54767732bd31488c6e89bdb))
### Bug Fixes
* Bigquery execute_sql to assign values to array ([#1884](https://github.com/googleapis/genai-toolbox/issues/1884)) ([559e2a2](https://github.com/googleapis/genai-toolbox/commit/559e2a22e0db20bb947702e13140ce869b5865a7))
* **cloudmonitoring:** Populate `authRequired` in tool manifest ([#1800](https://github.com/googleapis/genai-toolbox/issues/1800)) ([954152c](https://github.com/googleapis/genai-toolbox/commit/954152c7928bf0da9be221e011e32f74bc4cebbc))
* Update debug log statements ([#1828](https://github.com/googleapis/genai-toolbox/issues/1828)) ([3cff915](https://github.com/googleapis/genai-toolbox/commit/3cff915e22c3a5e4e296607f83ae6409b896c9a9))
* Instructions to quote filters that include commas ([#1794](https://github.com/googleapis/genai-toolbox/issues/1794)) ([4b01720](https://github.com/googleapis/genai-toolbox/commit/4b0172083c0dd4c71098d4e0ab5fa0b16ea0d830))
* **source/cloud-sql-mssql:** Remove `ipAddress` field ([#1822](https://github.com/googleapis/genai-toolbox/issues/1822)) ([38d535d](https://github.com/googleapis/genai-toolbox/commit/38d535de34cfedd6828a01d9dcd25daf1bad7306))
* **tools/alloydbainl:** AlloyDB AI NL execute_sql statement order ([#1753](https://github.com/googleapis/genai-toolbox/issues/1753)) ([9723cad](https://github.com/googleapis/genai-toolbox/commit/9723cadaa181a76d8fdda65a6254f6c887c3cf57))
* **tools/postgres-execute-sql:** Do not ignore SQL failure ([#1829](https://github.com/googleapis/genai-toolbox/issues/1829)) ([8984287](https://github.com/googleapis/genai-toolbox/commit/898428759c2a1a384bea8939605cf0914d129bec))
## [0.18.0](https://github.com/googleapis/genai-toolbox/compare/v0.17.0...v0.18.0) (2025-10-23)
### Features
* Support `allowedValues`, `escape`, `minValue` and `maxValue` for parameters ([#1770](https://github.com/googleapis/genai-toolbox/issues/1770)) ([eaf7740](https://github.com/googleapis/genai-toolbox/commit/eaf77406fd386c12315d67eb685dc69e0415c516))
* **tools/looker:** Tools to allow the agent to retrieve, create, modify, and delete LookML project files. ([#1673](https://github.com/googleapis/genai-toolbox/issues/1673)) ([089081f](https://github.com/googleapis/genai-toolbox/commit/089081feb0e32f9eb65d00df5987392d413a4081))
### Bug Fixes
* **sources/mysql:** Escape mysql user agent ([#1707](https://github.com/googleapis/genai-toolbox/issues/1707)) ([eeb694c](https://github.com/googleapis/genai-toolbox/commit/eeb694c20facc40a38bfa67073c4cb1f3dd657ff))
* **sources/mysql:** Escape program_name for MySQL ([#1717](https://github.com/googleapis/genai-toolbox/issues/1717)) ([02f7f8a](https://github.com/googleapis/genai-toolbox/commit/02f7f8af979057efe99fd138cb1b958130355b68))
* **tools/http:** Allow 2xx status code on tool invocation ([#1761](https://github.com/googleapis/genai-toolbox/issues/1761)) ([a06d0d8](https://github.com/googleapis/genai-toolbox/commit/a06d0d8735fbec29bea97457248845a8c6b4aa3c))
* **tools/http:** Omit optional nil query parameters ([#1762](https://github.com/googleapis/genai-toolbox/issues/1762)) ([bd16ba3](https://github.com/googleapis/genai-toolbox/commit/bd16ba3921e6177065780e5f29870859b8e18e4f))
* **tools/looker:** Looker file content calls should not use url.QueryEscape ([#1758](https://github.com/googleapis/genai-toolbox/issues/1758)) ([336de1b](https://github.com/googleapis/genai-toolbox/commit/336de1bd04b869d322c0fd1f4667eb652159d791))
## [0.17.0](https://github.com/googleapis/genai-toolbox/compare/v0.16.0...v0.17.0) (2025-10-10)
### ⚠ BREAKING CHANGES
* **tools/bigquery-get-table-info:** add allowed dataset support ([#1093](https://github.com/googleapis/genai-toolbox/issues/1093))
* **tools/bigquery-list-dataset-ids:** add allowed datasets support ([#1573](https://github.com/googleapis/genai-toolbox/issues/1573))
### Features
* Add configs and workflows for docs versioning ([#1611](https://github.com/googleapis/genai-toolbox/issues/1611)) ([21ac98b](https://github.com/googleapis/genai-toolbox/commit/21ac98bc065e95bde911d66185c67d8380891bf8))
* Add metadata in MCP Manifest for Toolbox auth ([#1395](https://github.com/googleapis/genai-toolbox/issues/1395)) ([0b3dac4](https://github.com/googleapis/genai-toolbox/commit/0b3dac41322f7aaa5a19df571686fa8fd4338ca5))
* Add program name to MySQL connections ([#1617](https://github.com/googleapis/genai-toolbox/issues/1617)) ([c4a22b8](https://github.com/googleapis/genai-toolbox/commit/c4a22b8d3bd0307325215ebd2f30ba37927cd37e))
* **source/bigquery:** Add optional write mode config ([#1157](https://github.com/googleapis/genai-toolbox/issues/1157)) ([63adc78](https://github.com/googleapis/genai-toolbox/commit/63adc78beae949dfe5e300c50e5ceef073e9652c))
* **sources/alloydb,cloudsqlpg,cloudsqlmysql,cloudsqlmssql:** Support PSC connection ([#1686](https://github.com/googleapis/genai-toolbox/issues/1686)) ([9d2bf79](https://github.com/googleapis/genai-toolbox/commit/9d2bf79becfda104ef77f34b8d4b22cbedbc4bf3))
* **sources/mssql:** Add app name to MSSQL ([#1620](https://github.com/googleapis/genai-toolbox/issues/1620)) ([1536d1f](https://github.com/googleapis/genai-toolbox/commit/1536d1fdabb9d7f73dbdeebeb05a83d9a3b78e1c))
* **sources/oracle:** Add Oracle Source and Tool ([#1456](https://github.com/googleapis/genai-toolbox/issues/1456)) ([3a19a50](https://github.com/googleapis/genai-toolbox/commit/3a19a50ff211e33429de1d05338d353359a52987))
* **sources/oracle:** Switch Oracle driver from godror to go-ora ([#1685](https://github.com/googleapis/genai-toolbox/issues/1685)) ([8faf376](https://github.com/googleapis/genai-toolbox/commit/8faf37667e371b4ed88ebb892e8784b67611ba64))
* **tool/bigquery-list-dataset-ids:** Add allowed datasets support ([#1573](https://github.com/googleapis/genai-toolbox/issues/1573)) ([1a44c67](https://github.com/googleapis/genai-toolbox/commit/1a44c671ec593e764a2d2f67f70a98ceec20a168))
* **tools/bigquery-get-table-info:** Add allowed dataset support ([#1093](https://github.com/googleapis/genai-toolbox/issues/1093)) ([acb205c](https://github.com/googleapis/genai-toolbox/commit/acb205ca4761d59ce97b804827230978c8c98ede))
* **tools/dataform:** Add dataform compile tool ([#1470](https://github.com/googleapis/genai-toolbox/issues/1470)) ([3be9b7b](https://github.com/googleapis/genai-toolbox/commit/3be9b7b3bdf112fe7303706e56e9f39935cde661))
* **tools/looker:** Add support for pulse, vacuum and analyze audit and performance functions on a Looker instance ([#1581](https://github.com/googleapis/genai-toolbox/issues/1581)) ([5aed4e1](https://github.com/googleapis/genai-toolbox/commit/5aed4e136d0091731d2ded10ec076ee789e1987c))
* **tools/looker:** Enable access to the Conversational Analytics API for Looker ([#1596](https://github.com/googleapis/genai-toolbox/issues/1596)) ([2d5a93e](https://github.com/googleapis/genai-toolbox/commit/2d5a93e312990c8a9f3170c7e9c655f97cf11712))
### Bug Fixes
* Added google_ml_integration extension to use alloydb ai-nl support api ([#1445](https://github.com/googleapis/genai-toolbox/issues/1445)) ([dbc477a](https://github.com/googleapis/genai-toolbox/commit/dbc477ab0f832495cf51f73ea16ae363472d6eed))
* Fix broken links ([#1625](https://github.com/googleapis/genai-toolbox/issues/1625)) ([36c6584](https://github.com/googleapis/genai-toolbox/commit/36c658472ccdeb6cddd8a4452a8b3438aeb0a744))
* Remove duplicated build type in Dockerfile ([#1598](https://github.com/googleapis/genai-toolbox/issues/1598)) ([b43c945](https://github.com/googleapis/genai-toolbox/commit/b43c94575d86aa65b0528d59f9b41d30b439fee5))
* **source/bigquery:** Allowed datasets project id issue with client oauth ([#1663](https://github.com/googleapis/genai-toolbox/issues/1663)) ([f4cf486](https://github.com/googleapis/genai-toolbox/commit/f4cf486fa929299fef076cf71689776e5dec19c1))
* **sources/looker:** Allow Looker to be configured without setting a Client Id or Secret ([#1496](https://github.com/googleapis/genai-toolbox/issues/1496)) ([67d8221](https://github.com/googleapis/genai-toolbox/commit/67d8221a2e780df54a81f0f7e8f7e41e4bf1a82e))
* **tools/looker:** Refactor run-inline-query logic to helper function ([#1497](https://github.com/googleapis/genai-toolbox/issues/1497)) ([62af39d](https://github.com/googleapis/genai-toolbox/commit/62af39d751443eb758586663969b162c868a233f))
* **tools/mysql-list-tables:** Update sql query to resolve subquery scope error ([#1629](https://github.com/googleapis/genai-toolbox/issues/1629)) ([94e19d8](https://github.com/googleapis/genai-toolbox/commit/94e19d87e54e831b80eb766572e48bc7370305d8))
## [0.16.0](https://github.com/googleapis/genai-toolbox/compare/v0.15.0...v0.16.0) (2025-09-25)
### ⚠ BREAKING CHANGES
* **tool/bigquery-execute-sql:** add allowed datasets support ([#1443](https://github.com/googleapis/genai-toolbox/issues/1443))
* **tool/bigquery-forecast:** add allowed datasets support ([#1412](https://github.com/googleapis/genai-toolbox/issues/1412))
### Features
* **cassandra:** Add Cassandra Source and Tool ([#1012](https://github.com/googleapis/genai-toolbox/issues/1012)) ([6e42053](https://github.com/googleapis/genai-toolbox/commit/6e420534ee894da4a8d226acb6cdb63d0d5d9ce5))
* **sources/postgres:** Add application_name ([#1504](https://github.com/googleapis/genai-toolbox/issues/1504)) ([72a2366](https://github.com/googleapis/genai-toolbox/commit/72a2366b28870aa6f81c4f890f4770ec5ecffdba))
* **tool/bigquery-execute-sql:** Add allowed datasets support ([#1443](https://github.com/googleapis/genai-toolbox/issues/1443)) ([9501ebb](https://github.com/googleapis/genai-toolbox/commit/9501ebbdbcba871b98663185c690308dda1729b5))
* **tool/bigquery-forecast:** Add allowed datasets support ([#1412](https://github.com/googleapis/genai-toolbox/issues/1412)) ([88bac7e](https://github.com/googleapis/genai-toolbox/commit/88bac7e36f5ebb6ad18773bff30b85ef678431e7))
* **tools/clickhouse-list-tables:** Add list-tables tool ([#1446](https://github.com/googleapis/genai-toolbox/issues/1446)) ([69a3caf](https://github.com/googleapis/genai-toolbox/commit/69a3cafabec5a40e2776d71de3587c0d16c722a2))
### Bug Fixes
* **tool/mongodb-find:** Fix find tool `limit` field ([#1570](https://github.com/googleapis/genai-toolbox/issues/1570)) ([4166bf7](https://github.com/googleapis/genai-toolbox/commit/4166bf7ab85732f64b877d5f20235057df919049))
* **tools/mongodb:** Concat filter params only once in mongodb update tools ([#1545](https://github.com/googleapis/genai-toolbox/issues/1545)) ([295f9dc](https://github.com/googleapis/genai-toolbox/commit/295f9dc41a43f0a4bdbd99e465bf2be01249084e))
## [0.15.0](https://github.com/googleapis/genai-toolbox/compare/v0.14.0...v0.15.0) (2025-09-18)
### ⚠ BREAKING CHANGES
* **prebuilt:** update prebuilt tool names to use consistent guidance ([#1421](https://github.com/googleapis/genai-toolbox/issues/1421))
* **tools/alloydb-wait-for-operation:** Add `alloydb-admin` source to `alloydb-wait-for-operation` tool ([#1449](https://github.com/googleapis/genai-toolbox/issues/1449))
### Features
* Add AlloyDB admin source ([#1369](https://github.com/googleapis/genai-toolbox/issues/1369)) ([33beb71](https://github.com/googleapis/genai-toolbox/commit/33beb7187d2e0f968fc949a00c780073d1bc7cdd))
* Add Cloud monitoring source and tool ([#1311](https://github.com/googleapis/genai-toolbox/issues/1311)) ([d661f53](https://github.com/googleapis/genai-toolbox/commit/d661f5343f2ad28fbf0481db16440aec823eece6))
* Add YugabyteDB Source and Tool ([#732](https://github.com/googleapis/genai-toolbox/issues/732)) ([664711f](https://github.com/googleapis/genai-toolbox/commit/664711f4b35409bd1c57af92f625b70a0dc9a4e6))
* **prebuilt:** Update default values for prebuilt tools ([#1355](https://github.com/googleapis/genai-toolbox/issues/1355)) ([70e832b](https://github.com/googleapis/genai-toolbox/commit/70e832bd08a98a95b925e590f31c8d3f2d8b6aa0))
* **prebuilt/cloud-sql:** Add list instances tool for cloudsql ([#1310](https://github.com/googleapis/genai-toolbox/issues/1310)) ([0171228](https://github.com/googleapis/genai-toolbox/commit/01712284b480774ffa68930affae290ee2e3fcfd))
* **prebuilt/cloud-sql:** Add cloud sql create database tool. ([#1453](https://github.com/googleapis/genai-toolbox/issues/1453)) ([a1bc044](https://github.com/googleapis/genai-toolbox/commit/a1bc04477b0f822ffaab039098682f1776b8a472))
* **prebuilt/cloud-sql:** Add `cloud-sql-get-instances` tool ([#1383](https://github.com/googleapis/genai-toolbox/issues/1383)) ([77919c7](https://github.com/googleapis/genai-toolbox/commit/77919c7d8e4aac16eeb703c0cc61ca774dc4f94e))
* **prebuilt/cloud-sql:** Add create user tool for cloud sql ([#1406](https://github.com/googleapis/genai-toolbox/issues/1406)) ([3a6b517](https://github.com/googleapis/genai-toolbox/commit/3a6b51752f077b225b8c2e2e7308a69a68eec3c0))
* **prebuilt/cloud-sql:** Add list databases tool for cloud sql ([#1454](https://github.com/googleapis/genai-toolbox/issues/1454)) ([e6a6c61](https://github.com/googleapis/genai-toolbox/commit/e6a6c615d5480e8930ad173d44d243f5bd99eebc))
* **prebuilt/cloud-sql:** Package cloud sql tools ([#1455](https://github.com/googleapis/genai-toolbox/issues/1455)) ([bf6266b](https://github.com/googleapis/genai-toolbox/commit/bf6266ba1131bd1c5829ac112a8c45c8a5919fea))
* **prebuilt/cloud-sql-mssql:** Add create instance tool for mssql ([#1440](https://github.com/googleapis/genai-toolbox/issues/1440)) ([b176523](https://github.com/googleapis/genai-toolbox/commit/b17652309d8a02b1f20c6c576b1617b23c8e481f))
* **prebuilt/cloud-sql-mysql:** Add create instance tool for Cloud SQL MySQL ([#1434](https://github.com/googleapis/genai-toolbox/issues/1434)) ([15b628d](https://github.com/googleapis/genai-toolbox/commit/15b628d2d2feb2ecdd418394b9265a6c77c77f6d))
* **prebuilt/cloud-sql-mysql:** Add env var support for IP Type ([#1232](https://github.com/googleapis/genai-toolbox/issues/1232)) ([#1347](https://github.com/googleapis/genai-toolbox/issues/1347)) ([0cd3f16](https://github.com/googleapis/genai-toolbox/commit/0cd3f16f877f426b45e35625ba0af03789459591))
* **prebuilt/cloudsqlpg:** Add cloud sql pg create instance tool ([#1403](https://github.com/googleapis/genai-toolbox/issues/1403)) ([d302499](https://github.com/googleapis/genai-toolbox/commit/d30249961b5a2ddc2c3809b481085d1ca034ead0))
* **prebuilt/mysql:** Add a new tool to show query plan of a given query in MySQL ([#1474](https://github.com/googleapis/genai-toolbox/issues/1474)) ([1a42e05](https://github.com/googleapis/genai-toolbox/commit/1a42e05675645ac4f1b89edef7a71ac61b637a76))
* **prebuilt/mysql:** Add `queryParams` field in MySQL prebuilt config ([#1318](https://github.com/googleapis/genai-toolbox/issues/1318)) ([4b32c2a](https://github.com/googleapis/genai-toolbox/commit/4b32c2a7701ce5ccc56d019055283e73e7046372))
* **prebuilt/neo4j:** Add prebuiltconfig support for neo4j ([#1352](https://github.com/googleapis/genai-toolbox/issues/1352)) ([f819e26](https://github.com/googleapis/genai-toolbox/commit/f819e2644315a589ec283494f244c1b8407cae59))
* **prebuilt/observability:** Add cloud sql observability tools ([#1425](https://github.com/googleapis/genai-toolbox/issues/1425)) ([236be89](https://github.com/googleapis/genai-toolbox/commit/236be89961fe423c1ec992d3d1f699f77a6e5b29))
* **prebuilt/postgres:** Add postgres prebuilt tools ([#1473](https://github.com/googleapis/genai-toolbox/issues/1473)) ([edca9dc](https://github.com/googleapis/genai-toolbox/commit/edca9dc7d772baf1a234485020fa69d76f71bfcc))
* **prebuilt/sqlite:** Prebuilt tools for the sqlite. ([#1227](https://github.com/googleapis/genai-toolbox/issues/1227)) ([681c2b4](https://github.com/googleapis/genai-toolbox/commit/681c2b4f3a65837d972c138c623c08fb6b1f1785))
* **source/alloydb-admin:** Add user agent and attach alloydb api in `alloydb-admin` source ([#1448](https://github.com/googleapis/genai-toolbox/issues/1448)) ([9710014](https://github.com/googleapis/genai-toolbox/commit/971001400f25796784f8aeb3ec5cb1a2df2e4c69))
* **source/bigquery:** Add support for datasets selection ([#1313](https://github.com/googleapis/genai-toolbox/issues/1313)) ([aa39724](https://github.com/googleapis/genai-toolbox/commit/aa3972470fd0f6f5901c5d85dd05f1e2ae973e7b))
* **source/cloud-monitoring:** Add support for user agent in cloud monitoring source ([#1472](https://github.com/googleapis/genai-toolbox/issues/1472)) ([92680b1](https://github.com/googleapis/genai-toolbox/commit/92680b18d6159300ae66f80ddb4c6bf0547d45a1))
* **source/cloud-sql-admin:** Add user agent and attach sqladmin in `cloud-sql-admin` source. ([#1441](https://github.com/googleapis/genai-toolbox/issues/1441)) ([56b6574](https://github.com/googleapis/genai-toolbox/commit/56b6574fc2c506c7c7df7f2a25686e3e4aae0e8a))
* **source/cloudsqladmin:** Add cloud sql admin source ([#1408](https://github.com/googleapis/genai-toolbox/issues/1408)) ([4f46782](https://github.com/googleapis/genai-toolbox/commit/4f4678292762507494515ce61188cd0310805c40))
* **tool/cloudsql:** Add cloud sql wait for operation tool with exponential backoff ([#1306](https://github.com/googleapis/genai-toolbox/issues/1306)) ([3aef2bb](https://github.com/googleapis/genai-toolbox/commit/3aef2bb7be8274bb4718739faeaa5f97b50dbf19))
* **tools/alloydb-create-cluster:** Add custom tool kind for AlloyDB create cluster ([#1331](https://github.com/googleapis/genai-toolbox/issues/1331)) ([76bb876](https://github.com/googleapis/genai-toolbox/commit/76bb876d546780908c1a69ef3b1a92781af28a3b))
* **tools/alloydb-create-instance:** Add new custom tool kind for AlloyDB ([#1379](https://github.com/googleapis/genai-toolbox/issues/1379)) ([091cd9a](https://github.com/googleapis/genai-toolbox/commit/091cd9aa1aabe1cb3de2ce2be5c707a8f77ad647))
* **tools/alloydb-create-user:** Add new custom tool kind for AlloyDB create user ([#1380](https://github.com/googleapis/genai-toolbox/issues/1380)) ([ab3fd26](https://github.com/googleapis/genai-toolbox/commit/ab3fd261af373dcdaf4555292c63d7095d7a02df))
* **tools/alloydb-get-cluster:** Add new tool for AlloyDB ([#1420](https://github.com/googleapis/genai-toolbox/issues/1420)) ([c181dab](https://github.com/googleapis/genai-toolbox/commit/c181dabc91bdc1c24c89a3c7bba0049d9af4cf2b))
* **tools/alloydb-get-instance:** Add new tool for AlloyDB ([#1435](https://github.com/googleapis/genai-toolbox/issues/1435)) ([f2d9e3b](https://github.com/googleapis/genai-toolbox/commit/f2d9e3b57963082f0db70880d5c02b1cbe3eb75d))
* **tools/alloydb-get-user:** Add new tool for AlloyDB ([#1436](https://github.com/googleapis/genai-toolbox/issues/1436)) ([677254e](https://github.com/googleapis/genai-toolbox/commit/677254e6d9c532fa9f0fb0b0e4062446640ab75f))
* **tools/alloydb-list-cluster:** Add custom tool kind for AlloyDB ([#1319](https://github.com/googleapis/genai-toolbox/issues/1319)) ([d4a9eb0](https://github.com/googleapis/genai-toolbox/commit/d4a9eb0ce217c7969aff61868e53c7dc7757d28d))
* **tools/alloydb-list-instances:** Add custom tool kind for AlloyDB ([#1357](https://github.com/googleapis/genai-toolbox/issues/1357)) ([93c1b30](https://github.com/googleapis/genai-toolbox/commit/93c1b30fced113d6721ade9fdcfb92b5ed6c0ad6))
* **tools/alloydb-list-users:** Add new custom tool kind for AlloyDB ([#1377](https://github.com/googleapis/genai-toolbox/issues/1377)) ([3a8a65c](https://github.com/googleapis/genai-toolbox/commit/3a8a65ceaa92368563e237087c4a38ed7c0d3fd5))
* **tools/bigquery-analyze-contribution:** Add analyze contribution tool ([#1223](https://github.com/googleapis/genai-toolbox/issues/1223)) ([81d239b](https://github.com/googleapis/genai-toolbox/commit/81d239b053a6978250878a6809905dcc9424909e))
* **tools/bigquery-conversational-analytics:** Add allowed datasets support ([#1411](https://github.com/googleapis/genai-toolbox/issues/1411)) ([345bd6a](https://github.com/googleapis/genai-toolbox/commit/345bd6af520bb9ce8a43834951e68fea7bbe6a02))
* **tools/bigquery-search-catalog:** Add new tool to BigQuery ([#1382](https://github.com/googleapis/genai-toolbox/issues/1382)) ([bffb39d](https://github.com/googleapis/genai-toolbox/commit/bffb39dea3cc946a1e611e3523241443b1e4f047))
* **tools/bigquery:** Add `useClientOAuth` to BigQuery prebuilt source config ([#1431](https://github.com/googleapis/genai-toolbox/issues/1431)) ([fe2999a](https://github.com/googleapis/genai-toolbox/commit/fe2999a691ac92b2bf35cb7cfd504df2f3ce84b3))
* **tools/clickhouse-list-databases:** Add `list-databases` tool to clickhouse source ([#1274](https://github.com/googleapis/genai-toolbox/issues/1274)) ([e515d92](https://github.com/googleapis/genai-toolbox/commit/e515d9254f3b8e89f89322d490eb3cedce85d2bb))
* **tools/firestore-get-rules:** Add `databaseId` to the Firestore source and `firestore-get-rules` tool ([#1505](https://github.com/googleapis/genai-toolbox/issues/1505)) ([7450482](https://github.com/googleapis/genai-toolbox/commit/7450482bb2479eab7d1c8f0d40755a8d11aa3b26))
* **tools/firestore:** Add `firestore-query` tool ([#1305](https://github.com/googleapis/genai-toolbox/issues/1305)) ([cce602f](https://github.com/googleapis/genai-toolbox/commit/cce602f28097353f6a3017cec1fa5f75283f111d))
* **tools/looker:** Query tracking for MCP Toolbox in Looker System Activity views ([#1410](https://github.com/googleapis/genai-toolbox/issues/1410)) ([2036c8e](https://github.com/googleapis/genai-toolbox/commit/2036c8efd2fb9edc26df599629d3131c6c367f4b))
* **tools/mssql-list-tables:** Add new tool for sql server ([#1433](https://github.com/googleapis/genai-toolbox/issues/1433)) ([b036047](https://github.com/googleapis/genai-toolbox/commit/b036047a21f63265c9d9637ac1a671792c9c2e80))
* **tools/mysql-list-active-queries:** Add a new tool to list ongoing queries in a MySQL instance ([#1471](https://github.com/googleapis/genai-toolbox/issues/1471)) ([ed54cd6](https://github.com/googleapis/genai-toolbox/commit/ed54cd6cfd17a3bdd84025d4eb8264763da36a98))
* **tools/mysql-list-table-fragmentation:** Add a new tool to list table fragmentation in a MySQL instance ([#1479](https://github.com/googleapis/genai-toolbox/issues/1479)) ([fe651d8](https://github.com/googleapis/genai-toolbox/commit/fe651d822f88832833e869ec049c6c084eae7e51))
* **tools/mysql-list-tables-missing-index:** Add a new tool to list tables that do not have primary or unique keys in a MySQL instance ([#1493](https://github.com/googleapis/genai-toolbox/issues/1493)) ([9eb821a](https://github.com/googleapis/genai-toolbox/commit/9eb821a6dca408ba993f904aa42b5b4f70674ba7))
* **tools/mysql-list-tables:** Add new tool for MySQL ([#1287](https://github.com/googleapis/genai-toolbox/issues/1287)) ([6c8460b](https://github.com/googleapis/genai-toolbox/commit/6c8460b0e507315d407c91ba1c821f4820cc1620))
* **tools/postgres-list-active-queries:** Add new `postgres-list-active-queries` tool ([#1400](https://github.com/googleapis/genai-toolbox/issues/1400)) ([b2b06c7](https://github.com/googleapis/genai-toolbox/commit/b2b06c72c29fd99a0c7118b85e6f7bcf6853d173))
* **tools/postgres-list-tables:** Add new tool to postgres source ([#1284](https://github.com/googleapis/genai-toolbox/issues/1284)) ([71f360d](https://github.com/googleapis/genai-toolbox/commit/71f360d31522f429a646b705ce7d1d11dac4cf68))
* **tools/spanner-list-tables:** Add new tool `spanner-list-tables` ([#1404](https://github.com/googleapis/genai-toolbox/issues/1404)) ([7d384dc](https://github.com/googleapis/genai-toolbox/commit/7d384dc28f8c37dddc2f6cefc0bbeb4c201e3167))
### Bug Fixes
* **bigquery:** Add `Bearer` parsing to auth token ([#1386](https://github.com/googleapis/genai-toolbox/issues/1386)) ([b5f9780](https://github.com/googleapis/genai-toolbox/commit/b5f9780a59e15eca2591dee32f5da42435e03039))
* **source/alloydb-admin, source/cloudsql-admin:** Post append new user agent ([#1494](https://github.com/googleapis/genai-toolbox/issues/1494)) ([30f1d3a](https://github.com/googleapis/genai-toolbox/commit/30f1d3a983aa317f1e1a98f9fe753005b56c52bd))
* **tools/alloydb:** Update parameter names and set default description for AlloyDB control plane tools ([#1468](https://github.com/googleapis/genai-toolbox/issues/1468)) ([6c140d7](https://github.com/googleapis/genai-toolbox/commit/6c140d718a66b45c7ec2d5a267331adb7680f689))
* **tools/bigquery-conversational-analytics:** Fix authentication scope error in Cloud Run ([#1381](https://github.com/googleapis/genai-toolbox/issues/1381)) ([80b7488](https://github.com/googleapis/genai-toolbox/commit/80b7488ad248ab1d98ee6713e1f6737f67f6754b))
* **tools/mysql-list-tables:** Update `mysql-list-tables` table_names parameter with default value ([#1439](https://github.com/googleapis/genai-toolbox/issues/1439)) ([da24661](https://github.com/googleapis/genai-toolbox/commit/da246610e105df10a9dc1bce19fa35d408c039f3))
* **tools/neo4j:** Implement value conversion for Neo4j types to JSON-compatible ([#1428](https://github.com/googleapis/genai-toolbox/issues/1428)) ([4babc4e](https://github.com/googleapis/genai-toolbox/commit/4babc4e11b3b64db8d8c9d6b65e47744f5174f7f))
## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)
### ⚠ BREAKING CHANGES
* **bigquery:** Move `useClientOAuth` config from tool to source ([#1279](https://github.com/googleapis/genai-toolbox/issues/1279)) ([8d20a48](https://github.com/googleapis/genai-toolbox/commit/8d20a48f13bcda853d41bdf80a162de12b076d1b))
* **tools/bigquerysql:** remove `useClientOAuth` from tools config ([#1312](https://github.com/googleapis/genai-toolbox/issues/1312))
### Features
* **clickhouse:** Add ClickHouse Source and Tools ([#1088](https://github.com/googleapis/genai-toolbox/issues/1088)) ([75a04a5](https://github.com/googleapis/genai-toolbox/commit/75a04a55dd2259bed72fe95119a7a51a906c0b21))
* **prebuilt/alloydb-postgres:** Support ipType and IAM users ([#1324](https://github.com/googleapis/genai-toolbox/issues/1324)) ([0b2121e](https://github.com/googleapis/genai-toolbox/commit/0b2121ea72eb81348dcd9c740a62ccd32e71fe37))
* **server/mcp:** Support toolbox auth in mcp ([#1140](https://github.com/googleapis/genai-toolbox/issues/1140)) ([ca353e0](https://github.com/googleapis/genai-toolbox/commit/ca353e0b66fedc00e9110df57db18632aef49018))
* **source/mysql:** Support `queryParams` in MySQL source ([#1299](https://github.com/googleapis/genai-toolbox/issues/1299)) ([3ae2526](https://github.com/googleapis/genai-toolbox/commit/3ae2526e0fe36b57b05a9b54f1d99f3fc68d9657))
* **tools/bigquery:** Support end-user credential passthrough on multiple BQ tools ([#1314](https://github.com/googleapis/genai-toolbox/issues/1314)) ([88f4b30](https://github.com/googleapis/genai-toolbox/commit/88f4b3028df3b6a400936cdf8a035bf55021924c))
* **tools/looker:** Add description for looker-get-models tool ([#1266](https://github.com/googleapis/genai-toolbox/issues/1266)) ([89af3a4](https://github.com/googleapis/genai-toolbox/commit/89af3a4ca332f029615b2a739d1f6cd50519638d))
* **tools/looker:** Authenticate via end user credentials ([#1257](https://github.com/googleapis/genai-toolbox/issues/1257)) ([8755e3d](https://github.com/googleapis/genai-toolbox/commit/8755e3db3476abb35629b3cca9c78db7366757a4))
* **tools/looker:** Report field suggestions to agent ([#1267](https://github.com/googleapis/genai-toolbox/issues/1267)) ([2cad82e](https://github.com/googleapis/genai-toolbox/commit/2cad82e5107566dd6c9b75e34e9976af63af0bb5))
### Bug Fixes
* Do not print usage on runtime error ([#1315](https://github.com/googleapis/genai-toolbox/issues/1315)) ([afba7a5](https://github.com/googleapis/genai-toolbox/commit/afba7a57cdd4fe7c1b0741dbf8f8c78b14a68089))
* Update env var to allow empty string ([#1260](https://github.com/googleapis/genai-toolbox/issues/1260)) ([03aa9fa](https://github.com/googleapis/genai-toolbox/commit/03aa9fabacda06f860c9f178485126bddb7d5782))
* **tools/firestore:** Add document/collection path validation ([#1229](https://github.com/googleapis/genai-toolbox/issues/1229)) ([14c2249](https://github.com/googleapis/genai-toolbox/commit/14c224939a2f9bb349fa00a7d5227877198530c2))
* **tools/looker-get-dashboards:** Fix Looker client OAuth check ([#1338](https://github.com/googleapis/genai-toolbox/issues/1338)) ([36225aa](https://github.com/googleapis/genai-toolbox/commit/36225aa6db7f8426ad87930866530fde4e9bf0cd))
* **tools/oceanbase:** Fix encoded text with mysql driver ([#1283](https://github.com/googleapis/genai-toolbox/issues/1283)) ([d16f89f](https://github.com/googleapis/genai-toolbox/commit/d16f89fbb6e49c03998f114ef7dc2b584b5e4967)), closes [#1161](https://github.com/googleapis/genai-toolbox/issues/1161)
## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)
### ⚠ BREAKING CHANGES

View File

@@ -1 +0,0 @@
GEMINI.md

View File

@@ -25,66 +25,166 @@ This project follows
## Contribution process
> [!NOTE]
> New contributions should always include both unit and integration tests.
### Code reviews
All submissions, including submissions by project members, require review. We
use GitHub pull requests for this purpose. Consult
[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
information on using pull requests.
### Code reviews
* Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes.
* When requesting changes, reviewers should self-assign the PR to ensure
Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes. When requesting changes, reviewers should self-assign the PR to ensure
they are aware of any updates.
* If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed.
* Commits will be
If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed. Commits will be
squashed when merged.
* Please follow up with changes promptly.
* If a PR is awaiting changes by the
Please follow up with changes promptly. If a PR is awaiting changes by the
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
are inactive for more than 30 days may be closed.
### Automated Code Reviews
### Adding a New Database Source and Tool
This repository uses **Gemini Code Assist** to provide automated code reviews on Pull Requests. While this does not replace human review, it provides immediate feedback on code quality and potential issues.
We recommend creating an
[issue](https://github.com/googleapis/genai-toolbox/issues) before
implementation to ensure we can accept the contribution and no duplicated work.
If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team. New
contributions should be added with both unit tests and integration tests.
You can manually trigger the bot by commenting on your Pull Request:
#### 1. Implement the New Data Source
* `/gemini`: Manually invokes Gemini Code Assist in comments
* `/gemini review`: Posts a code review of the changes in the pull request
* `/gemini summary`: Posts a summary of the changes in the pull request.
* `/gemini help`: Overview of the available commands
We recommend looking at an [example source
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).
## Guidelines for Pull Requests
* **Create a new directory** under `internal/sources` for your database type
(e.g., `internal/sources/newdb`).
* **Define a configuration struct** for your data source in a file named
`newdb.go`. Create a `Config` struct to include all the necessary parameters
for connecting to the database (e.g., host, port, username, password, database
name) and a `Source` struct to store necessary parameters for tools (e.g.,
Name, Kind, connection object, additional config).
* **Implement the
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
interface**. This interface requires two methods:
* `SourceConfigKind() string`: Returns a unique string identifier for your
data source (e.g., `"newdb"`).
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
Creates a new instance of your data source and establishes a connection to
the database.
* **Implement the
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
interface**. This interface requires one method:
* `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
1. Please keep your PR small for more thorough review and easier updates. In case of regression, it also allows us to roll back a single feature instead of multiple ones.
1. For non-trivial changes, consider opening an issue and discussing it with the code owners first.
1. Provide a good PR description as a record of what change is being made and why it was made. Link to a GitHub issue if it exists.
1. Make sure your code is thoroughly tested with unit tests and integration tests. Remember to clean up the test instances properly in your code to avoid memory leaks.
#### 2. Implement the New Tool
## Implementation Guides
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
For technical details on how to implement new features, please refer to the
[Developer Documentation](./DEVELOPER.md).
* **Create a new directory** under `internal/tools` for your tool type (e.g.,
`internal/tools/newdb` or `internal/tools/newdb<tool_name>`).
* **Define a configuration struct** for your tool in a file named `newdbtool.go`.
Create a `Config` struct and a `Tool` struct to store necessary parameters for
tools.
* **Implement the
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
interface**. This interface requires one method:
* `ToolConfigKind() string`: Returns a unique string identifier for your tool
(e.g., `"newdb"`).
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
instance of your tool and validates that it can connect to the specified
data source.
* **Implement the `Tool` interface**. This interface requires the following
methods:
* `Invoke(ctx context.Context, params map[string]any) ([]any, error)`:
Executes the operation on the database using the provided parameters.
* `ParseParams(data map[string]any, claims map[string]map[string]any)
(ParamValues, error)`: Parses and validates the input parameters.
* `Manifest() Manifest`: Returns a manifest describing the tool's capabilities
and parameters.
* `McpManifest() McpManifest`: Returns an MCP manifest describing the tool for
use with the Model Context Protocol.
* `Authorized(services []string) bool`: Checks if the tool is authorized to
run based on the provided authentication services.
* **Implement `init()`** to register the new Tool.
* **Implement Unit Tests** in a file named `newdb_test.go`.
* [Adding a New Database Source](./DEVELOPER.md#adding-a-new-database-source)
* [Adding a New Tool](./DEVELOPER.md#adding-a-new-tool)
* [Adding Integration Tests](./DEVELOPER.md#adding-integration-tests)
* [Adding Documentation](./DEVELOPER.md#adding-documentation)
* [Adding Prebuilt Tools](./DEVELOPER.md#adding-prebuilt-tools)
#### 3. Add Integration Tests
## Submitting a Pull Request
* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the
`/tests/newdb/newdb_test.go` that are **required** to be run as long as your
code contains related features. Please check each test suite for the config
defaults; if your source requires test suite config updates, please refer to
[config option](./tests/option.go):
1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
tool's manifest.
2. [RunToolInvokeTest][tool-call]: tests for tool calling through the native
Toolbox endpoints.
3. [RunMCPToolCallMethod][mcp-call]: tests tool calling through the MCP
endpoints.
4. (Optional) [RunExecuteSqlToolInvokeTest][execute-sql]: tests an
`execute-sql` tool for any source. Only run this test if you are adding an
`execute-sql` tool.
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
parameters][temp-param-doc]. Only run this test if template
parameters apply to your tool.
* **Add the new database to the integration test workflow** in
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
[tool-get]:
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L31
[tool-call]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L79>
[mcp-call]:
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L554
[execute-sql]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L431>
[temp-param]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L297>
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
#### 4. Add Documentation
* **Update the documentation** to include information about your new data source
and tool. This includes:
* Adding a new page to the `docs/en/resources/sources` directory for your data
source.
* Adding a new page to the `docs/en/resources/tools` directory for your tool.
* **(Optional) Add samples** to the `docs/en/samples/<newdb>` directory.
#### (Optional) 5. Add Prebuilt Tools
You can provide developers with a set of "build-time" tools to aid common
software development user journeys like viewing and creating tables/collections
and data.
* **Create a set of prebuilt tools** by defining a new `tools.yaml` and adding
it to `internal/tools`. Make sure the file name matches the source (i.e. for
source "alloydb-postgres" create a file named "alloydb-postgres.yaml").
* **Update `cmd/root.go`** to add new source to the `prebuilt` flag.
* **Add tests** in
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
and [cmd/root_test.go](cmd/root_test.go).
#### 6. Submit a Pull Request
Submit a pull request to the repository with your changes. Be sure to include a
detailed description of your changes and any requests for long term testing
resources.
* **Title:** All pull request titles should follow the formatting of
[Conventional
[Conventional
Commit](https://www.conventionalcommits.org/) guidelines: `<type>[optional
scope]: description`. For example, if you are adding a new field in postgres
source, the title should be `feat(source/postgres): add support for
@@ -106,7 +206,7 @@ resources.
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |
Pull requests should always add scope whenever possible. The scope is
formatted as `<scope-resource>/<scope-type>` (e.g., `sources/postgres`, or
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
`tools/mssql-sql`).
Ideally, **each PR covers only one scope**, if this is
@@ -118,4 +218,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
include a concise description of the changes, its impact, along with a
summary of the solution. If the PR is related to a specific issue, the issue
number should be mentioned in the PR description (e.g. `Fixes #1`).
number should be mentioned in the PR description (e.g. `Fixes #1`).

View File

@@ -44,288 +44,6 @@ Before you begin, ensure you have the following:
curl http://127.0.0.1:5000
```
#### Cross Compiling For Windows
Most developers work in a Unix or Unix-like environment.
Compiling for Windows requires the download of zig to provide a C and C++
compiler. These instructions are for cross compiling from Linux x86 but
should work for macOS with small changes.
1. Download zig for your platform.
```bash
cd $HOME
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
tar xf zig.tar.xz
```
This will create the directory $HOME/zig-x86_64-linux-0.15.2. You only need to do this once.
If you are on macOS curl from https://ziglang.org/download/0.15.2/zig-x86_64-macos-0.15.2.tar.xz
or https://ziglang.org/download/0.15.2/zig-aarch64-macos-0.15.2.tar.xz.
2. Change to your MCP Toolbox directory and run the following:
```bash
cd $HOME/genai-toolbox
GOOS=windows \
GOARCH=amd64 \
CGO_ENABLED=1 \
CC="$HOME/zig-x86_64-linux-0.15.2/zig cc -target x86_64-windows-gnu" \
CXX="$HOME/zig-x86_64-linux-0.15.2/zig c++ -target x86_64-windows-gnu" \
go build -o toolbox.exe
```
If you are on macOS alter the path `zig-x86_64-linux-0.15.2` to the proper path
for your zig installation.
Now the toolbox.exe file is ready to use. Transfer it to your windows machine and test it.
### Tool Naming Conventions
This section details the purpose and conventions for MCP Toolbox's tools naming
properties, **tool name** and **tool type**.
```
kind: tool
name: cancel_hotel <- tool name
type: postgres-sql <- tool type
source: my_pg_source
```
#### Tool Name
Tool name is the identifier used by a Large Language Model (LLM) to invoke a
specific tool.
* Custom tools: The user can define any name they want. The below guidelines
do not apply.
* Pre-built tools: The tool name is predefined and cannot be changed. It
should follow the guidelines.
The following guidelines apply to tool names:
* Should use underscores over hyphens (e.g., `list_collections` instead of
`list-collections`).
* Should not have the product name in the name (e.g., `list_collections` instead
of `firestore_list_collections`).
* Superficial changes are NOT considered as breaking (e.g., changing tool name).
* Non-superficial changes MAY be considered breaking (e.g. adding new parameters
to a function) until they can be validated through extensive testing to ensure
they do not negatively impact agent's performances.
#### Tool Type
Tool type serves as a category or type that a user can assign to a tool.
The following guidelines apply to tool types:
* Should use hyphens over underscores (e.g. `firestore-list-collections` over
`firestore_list_collections`).
* Should use product name in name (e.g. `firestore-list-collections` over
`list-collections`).
* Changes to tool type are breaking changes and should be avoided.
### Tool Invocation & Error Handling
To align with the Model Context Protocol (MCP) and ensure robust agentic workflows, Toolbox distinguishes between errors the agent can fix and errors that require developer intervention.
#### Error Categorization
When implementing `Invoke()` or `ParseParams()`, you must return the appropriate error type from `internal/util/errors.go`. This allows the LLM to attempt a "self-correct" for Agent Errors while signaling a hard stop for Server Errors.
| Category | Description | HTTP Status | MCP Result |
|---|---|---|---|
| **Agent Error** (`AgentError`) | Input/Execution logic errors (e.g., SQL syntax, missing records, invalid params). The agent can fix this. | 200 OK | `isError: true` |
| **Server Error** (`ClientServerError`) | Infrastructure failures (e.g., DB down, auth failure, network failure). The agent cannot fix this. | 500 Internal Error | JSON-RPC Error |
#### Implementation Guidelines
**Use Typed Errors**: Refactor or implement the `Tool` interface methods to return `util.ToolboxError`.
**In `Invoke()`:**
* **Agent Error**: Wrap database driver errors (syntax, constraint violations) in `AgentError`.
* **Server Error**: Wrap connection failures or internal logic crashes in `ClientServerError`.
**In `ParseParams()`:**
* Return `ToolboxError` for missing required parameters or wrong types.
* Return `ClientServerError` for failures in resolving authenticated parameters (e.g., invalid tokens).
**Example:**
func (t *MyTool) Invoke(ctx context.Context, sp tools.SourceProvider, params parameters.ParamValues, token tools.AccessToken) (any, util.ToolboxError) {
res, err := t.db.Exec(ctx, params.SQL)
if err != nil {
// Driver error is likely a syntax issue the LLM can fix
return nil, util.NewAgentError("error executing SQL query", err)
}
return res, nil
}
## Implementation Guides
### Adding a New Database Source or Tool
Please create an
[issue](https://github.com/googleapis/genai-toolbox/issues) before
implementation to ensure we can accept the contribution and no duplicated work.
This issue should include an overview of the API design. If you have any
questions, reach out on our [Discord](https://discord.gg/Dmm69peqjh) to chat
directly with the team.
> [!NOTE]
> New tools can be added for [pre-existing data
> sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources).
> However, any new database source should also include at least one new tool
> type.
#### Adding a New Database Source
We recommend looking at an [example source
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).
* **Create a new directory** under `internal/sources` for your database type
(e.g., `internal/sources/newdb`).
* **Define a configuration struct** for your data source in a file named
`newdb.go`. Create a `Config` struct to include all the necessary parameters
for connecting to the database (e.g., host, port, username, password, database
name) and a `Source` struct to store necessary parameters for tools (e.g.,
Name, Type, connection object, additional config).
* **Implement the
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
interface**. This interface requires two methods:
* `SourceConfigType() string`: Returns a unique string identifier for your
data source (e.g., `"newdb"`).
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
Creates a new instance of your data source and establishes a connection to
the database.
* **Implement the
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
interface**. This interface requires one method:
* `SourceType() string`: Returns the same string identifier as `SourceConfigType()`.
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
#### Adding a New Tool
> [!NOTE]
> Please follow the tool naming convention detailed
> [here](#tool-naming-conventions).
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
Remember to keep your PRs small. For example, if you are contributing a new Source, only include one or two core Tools within the same PR, the rest of the Tools can come in subsequent PRs.
* **Create a new directory** under `internal/tools` for your tool type (e.g., `internal/tools/newdb/newdbtool`).
* **Define a configuration struct** for your tool in a file named `newdbtool.go`.
Create a `Config` struct and a `Tool` struct to store necessary parameters for
tools.
* **Implement the
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
interface**. This interface requires one method:
* `ToolConfigType() string`: Returns a unique string identifier for your tool
(e.g., `"newdb-tool"`).
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
instance of your tool and validates that it can connect to the specified
data source.
* **Implement the `Tool` interface**. This interface requires the following
methods:
* `Invoke(ctx context.Context, params map[string]any) ([]any, error)`:
Executes the operation on the database using the provided parameters.
* `ParseParams(data map[string]any, claims map[string]map[string]any)
(ParamValues, error)`: Parses and validates the input parameters.
* `Manifest() Manifest`: Returns a manifest describing the tool's capabilities
and parameters.
* `McpManifest() McpManifest`: Returns an MCP manifest describing the tool for
use with the Model Context Protocol.
* `Authorized(services []string) bool`: Checks if the tool is authorized to
run based on the provided authentication services.
* **Implement `init()`** to register the new Tool.
* **Implement Unit Tests** in a file named `newdbtool_test.go`.
#### Adding Integration Tests
* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the
`/tests/newdb/newdb_integration_test.go` that are **required** to be run as
long as your code contains related features. Please check each test suite for
the config defaults; if your source requires test suite config updates, please
refer to [config option](./tests/option.go):
1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
tool's manifest.
2. [RunToolInvokeTest][tool-call]: tests for tool calling through the native
Toolbox endpoints.
3. [RunMCPToolCallMethod][mcp-call]: tests tool calling through the MCP
endpoints.
4. (Optional) [RunExecuteSqlToolInvokeTest][execute-sql]: tests an
`execute-sql` tool for any source. Only run this test if you are adding an
`execute-sql` tool.
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
parameters][temp-param-doc]. Only run this test if template
parameters apply to your tool.
* **Add additional tests** for the tools that are not covered by the predefined tests. Every tool must be tested!
* **Add the new database to the integration test workflow** in
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
[tool-get]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L41
[tool-call]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L229
[mcp-call]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L789
[execute-sql]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L609
[temp-param]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L454
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
#### Adding Documentation
When updating documentation, you must adhere to the structural constraints enforced by our Diátaxis-based layout and internal linters:
* **Adding a New Data Source:**
* Create a new folder for your integration in the `docs/en/integrations/` directory (e.g., `docs/en/integrations/newdb/`).
* Create an empty `_index.md` file. This acts purely as a structural folder wrapper for Hugo. Do not add body content here.
* Create a `source.md` file. **This is the definitive guide.** Add all connection details, authentication, and YAML configurations here. Ensure you include the `{{< list-tools >}}` shortcode to dynamically display tools.
* **Adding a New Native Tool:**
* Create a nested `tools/` directory inside your source (e.g., `docs/en/integrations/newdb/tools/`).
* Create an empty `_index.md` file inside the `tools/` directory. **It must contain only frontmatter** and absolutely no markdown body text.
* Add the tool details in a `<tool_name>.md` file in this new `tools/` folder. Ensure you include the `{{< compatible-sources >}}` shortcode.
* **Adding Inherited/Shared Tools (e.g., Managed Databases):**
* If a new database inherits tools from a base integration (like Cloud SQL inheriting Postgres tools), create the `tools/` directory with an `_index.md` file.
* Map the inherited tools dynamically by adding the `shared_tools` YAML array to the frontmatter of this `tools/_index.md` file. **This file must strictly contain only frontmatter.**
* **Adding Samples:**
* **Physical Location:**
1. **Quickstarts:** `docs/en/documentation/getting-started/quickstart/`.
2. **Integration-Specific:** `docs/en/integrations/<db>/samples/`. Must include an `_index.md` with strictly only frontmatter.
3. **General:** `docs/en/samples/`.
* **Frontmatter Requirements (Maintenance):** To ensure samples appear correctly in the Samples Section, you must provide the following tags:
* `is_sample: true` - Required for indexing.
* `sample_filters:` - A YAML array used for UI filtering (e.g., `[postgres, go, sql]`).
* **Adding Top-Level Sections:** If you add a completely new top-level documentation directory (e.g., a new section alongside `integrations`, `documentation`), you **must** update the AI documentation layout files located at `.hugo/layouts/index.llms.txt` and `.hugo/layouts/index.llms-full.txt`. Specifically, update the "Diátaxis Narrative Framework" preamble so AI models understand the purpose of your new section.
#### Adding Prebuilt Tools
You can provide developers with a set of "build-time" tools to aid common
software development user journeys like viewing and creating tables/collections
and data.
* **Create a set of prebuilt tools** by defining a new `tools.yaml` and adding
it to `internal/tools`. Make sure the file name matches the source (i.e. for
source "alloydb-postgres" create a file named "alloydb-postgres.yaml").
* **Update `cmd/root.go`** to add new source to the `prebuilt` flag.
* **Add tests** in
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
and [cmd/root_test.go](cmd/root_test.go).
## Testing
### Infrastructure
@@ -336,40 +54,18 @@ project "toolbox-testing-438616".
### Linting
### Code Linting
Run the lint check to ensure code quality:
```bash
golangci-lint run --fix
```
### Documentation Structure Linting
To ensure consistency, we enforce a standardized structure for integration `Source` and `Tool` pages.
Before pushing changes to integration pages:
Run the **source page** linter to validate:
```bash
# From the repository root
./.ci/lint-docs-source-page.sh
```
Run the **tool page** linter to validate:
```bash
# From the repository root
./.ci/lint-docs-tool-page.sh
```
### Unit Tests
Execute unit tests locally:
```bash
go test -race -v ./cmd/... ./internal/...
go test -race -v ./...
```
### Integration Tests
@@ -395,30 +91,11 @@ go test -race -v ./cmd/... ./internal/...
go test -race -v ./tests/alloydbpg
```
1. **Timeout:** The integration test should have a timeout on the server.
Look for code like this:
```go
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
```
Be sure to set the timeout to a reasonable value for your tests.
#### Running on Pull Requests
* **Internal Contributors:** Testing workflows should trigger automatically.
* **External Contributors:** Request Toolbox maintainers to trigger the testing
workflows on your PR.
* Maintainers can comment `/gcbrun` to execute the integration tests.
* Maintainers can add the label `tests:run` to execute the unit tests.
* Maintainers can add the label `docs: deploy-preview` to run the PR Preview workflow.
#### Test Resources
@@ -446,9 +123,6 @@ variables for each source.
* Couchbase - setup in the test project via the Marketplace
* DGraph - using the public dgraph interface <https://play.dgraph.io> for
testing
* Looker
* The Cloud Build service account is a user for conversational analytics
* The Looker instance runs under google.com:looker-sandbox.
* Memorystore Redis - setup in the test project using a Memorystore for Redis
standalone instance
* Memorystore Redis Cluster, Memorystore Valkey standalone, and Memorystore
@@ -468,30 +142,6 @@ variables for each source.
* SQLite - setup in the integration test, where we create a temporary database
file
### Link Checking and Fixing with Lychee
We use **[lychee](https://github.com/lycheeverse/lychee-action)** for repository link checks.
* To run the checker **locally**, see the [command-line usage guide](https://github.com/lycheeverse/lychee?tab=readme-ov-file#commandline-usage).
#### Fixing Broken Links
1. **Update the Link:** Correct the broken URL or update the content where it is used.
2. **Ignore the Link:** If you can't fix the link (e.g., due to **external rate-limits** or if it's a **local-only URL**), tell Lychee to **ignore** it.
* List **regular expressions** or **direct links** in the **[.lycheeignore](https://github.com/googleapis/genai-toolbox/blob/main/.lycheeignore)** file, one entry per line.
* **Always add a comment** explaining **why** the link is being skipped to prevent link rot. **Example `.lycheeignore`:**
```text
# These are email addresses, not standard web URLs, and usually cause check failures.
^mailto:.*
```
> [!NOTE]
> To avoid build failures in GitHub Actions, follow the linking pattern demonstrated here: <br>
> **Avoid:** (Works in Hugo, breaks Link Checker): `[Read more](docs/setup)` or `[Read more](docs/setup/)` <br>
> **Reason:** The link checker cannot find a file named "setup" or a directory with that name containing an index. <br>
> **Preferred:** `[Read more](docs/setup.md)` <br>
> **Reason:** The GitHub Action finds the physical file. Hugo then uses its internal logic (or render hooks) to resolve this to the correct `/docs/setup/` web URL. <br>
### Other GitHub Checks
* License header check (`.github/header-checker-lint.yml`) - Ensures files have
@@ -508,51 +158,6 @@ We use **[lychee](https://github.com/lycheeverse/lychee-action)** for repository
## Developing Documentation
### Documentation Standards & CI Checks
To maintain consistency and prevent repository bloat, all pull requests must pass the automated documentation linters.
#### Source Page Structure (`integrations/**/source.md`)
When adding or updating a Source page, your markdown file must strictly adhere to the following architectural rules:
* **File Name:** The configuration guide must be named `source.md`. *(Note: `_index.md` files are purely structural folder wrappers. Do not add body content to them).*
* **LinkTitle:** The linkTitle has to be set to the string `Source` always.
* **Frontmatter:** The `title` field must end with the word "Source" (e.g., `title: "Firestore Source"`).
* **No H1 Headings:** Do not use H1 (`#`) tags in the markdown body. The page title is automatically generated from the frontmatter.
* **H2 Heading Hierarchy:** You must use H2 (`##`) headings in a strict, specific order.
* **Required Headings:** `About`, `Example`, `Reference`
* **Allowed Optional Headings:** `Available Tools`, `Requirements`, `Advanced Usage`, `Troubleshooting`, `Additional Resources`
* **Available Tools Shortcode:** If you include the `## Available Tools` heading, you must place the list-tools shortcode (e.g., `{{< list-tools >}}`) directly beneath it.
#### Tool Page Structure (`integrations/**/tools/*.md`)
When adding or updating a Tool page, your markdown file must strictly adhere to the following architectural rules:
* **Location:** Native tools must be placed inside a nested `tools/` directory.
* **Frontmatter:** The `title` field must end with the word "Tool" (e.g., `title: "execute-sql Tool"`).
* **No H1 Headings:** Do not use H1 (`#`) tags in the markdown body. The page title is automatically generated from the frontmatter.
* **H2 Heading Hierarchy:** You must use H2 (`##`) headings in a strict, specific order.
* **Required Headings:** `About`, `Example`
* **Allowed Optional Headings:** `Compatible Sources`, `Requirements`, `Parameters`, `Output Format`, `Reference`, `Advanced Usage`, `Troubleshooting`, `Additional Resources`
* **Compatible Sources Shortcode:** If you include the `## Compatible Sources` heading, you must place the compatible-sources shortcode (e.g., `{{< compatible-sources >}}`) directly beneath it.
#### Prebuilt Configuration Structure (`integrations/**/prebuilt-configs/*.md`)
To ensure new prebuilt configurations are automatically indexed by the `{{< list-prebuilt-configs >}}` shortcode on the main Prebuilt Configs page, follow these rules:
* **Location:** Always place documentation for prebuilt configurations in a nested directory named `prebuilt-configs/` inside the database folder (e.g., `docs/en/integrations/alloydb/prebuilt-configs/`).
* **Index Wrapper:** Every `prebuilt-configs/` directory must contain an `_index.md` file. This file acts as the anchor for the directory and must contain the `title` and `description` used in the automated lists.
* **Architecture-Based Mapping:** Map configurations to database folders based on the `kind` defined in the tool's YAML file (in `internal/prebuiltconfigs/tools/`). For example, any tool using the `postgres` kind should live in the `postgres/` integration directory.
#### Frontend Assets & Layouts
If you need to modify the visual appearance, navigation, or behavior of the documentation website itself, all frontend assets are isolated within the `.hugo/` directory.
#### Repository Asset Limits
* **Max File Size:** No individual file within the `docs/` directory may exceed 24MB. This prevents repository bloat and ensures fast clone times. If you need to include large assets (like high-resolution videos or massive PDFs), host them externally and link to them in the markdown.
### Running a Local Hugo Server
Follow these steps to preview documentation changes locally using a Hugo server:
@@ -572,57 +177,19 @@ Follow these steps to preview documentation changes locally using a Hugo server:
npm ci
```
1. **Generate Search Index & Start the Server:** Because the Pagefind search engine requires physical files to build its index, `hugo server` (which runs purely in memory) will not display search results by default. To test the search bar locally, build the physical site once (using the development environment to avoid triggering production analytics), generate the index into the static folder, and then start the server:
1. **Start the Server:**
```bash
hugo --environment development
npx pagefind --site public --output-path static/pagefind
hugo server
```
*(Note: The `static/pagefind/` directory is git-ignored to prevent committing local search indexes).*
### Previewing Documentation on Pull Requests
Documentation preview links are automatically generated and commented on your pull request when working from a branch within the main repository.
**For external contributors (forks):**
For security reasons, automated deployment previews are disabled for pull requests originating from external forks for the cloudflare deployments. To review your documentation changes, please follow the [Running a Local Hugo Server](#running-a-local-hugo-server) instructions to build and view the site on your local machine before requesting a review.
### Document Versioning Setup
The documentation uses a dynamic versioning system that outputs standard HTML sites alongside AI-optimized plain text files (`llms.txt` and `llms-full.txt`).
**Search Indexing:** All deployment workflows automatically execute `npx pagefind --site public` to generate a version-scoped search index specific to that deployment's base URL.
There are 3 GHA workflows we use to achieve document versioning:
1. **Deploy In-development docs:**
This workflow is run on every commit merged into the main branch. It deploys
the built site to the `/dev/` subdirectory for the in-development
documentation.
1. **Deploy Versioned Docs:**
When a new GitHub Release is published, it performs two deployments based on
the new release tag. One to the new version subdirectory and one to the root
directory of the cloudflare-pages branch.
**Note:** Before the release PR from release-please is merged, add the
newest version into the hugo.toml file.
1. **Deploy Previous Version Docs:**
This is a manual workflow, started from the GitHub Actions UI.
To rebuild and redeploy documentation for a version that was released
before this new system was in place. This workflow can be started from
the UI by providing the git version tag for which you want to generate
the documentation. The specific versioned subdirectory and the root docs
are updated on the cloudflare-pages branch.
#### Contributors
Request a repo owner to run the preview deployment workflow on your PR. A
preview link will be automatically added as a comment to your PR.
#### Maintainers
1. **Inspect Changes:** Review the proposed changes in the PR to ensure they are
@@ -631,44 +198,6 @@ preview link will be automatically added as a comment to your PR.
1. **Deploy Preview:** Apply the `docs: deploy-preview` label to the PR to
deploy a documentation preview.
### Shortcodes
This repository includes custom shortcodes to help with documentation consistency and maintenance.
For more information on how they work, see the [Hugo Shortcodes](https://gohugo.io/content-management/shortcodes/) documentation and the guide to [creating custom shortcodes](https://gohugo.io/templates/shortcode/).
#### `include` Shortcode
The `include` shortcode reads a file and optionally fences it with a language.
**Syntax:**
`{{< include "path/to/file" "language" >}}`
**Example:**
`{{< include "static/headers/license_header.txt" >}}`
`{{< include "samples/program.js" "javascript" >}}`
**Source:** [.hugo/layouts/shortcodes/include.html](.hugo/layouts/shortcodes/include.html)
#### `regionInclude` Shortcode
The `regionInclude` shortcode reads a file, extracts content between `[START region_name]` and `[END region_name]`, and optionally fences it.
**Syntax:**
`{{< regionInclude "path/to/file" "region_name" "language" >}}`
**Example Markdown:**
`{{< regionInclude "samples/program.js" "program_setup" "javascript" >}}`
**Example Code Snippet (`samples/program.js`):**
```javascript
// [START program_setup]
import { Toolbox } from '@googleapis/genai-toolbox';
const toolbox = new Toolbox();
// [END program_setup]
```
**Source:** [.hugo/layouts/shortcodes/regionInclude.html](.hugo/layouts/shortcodes/regionInclude.html)
## Building Toolbox
### Building the Binary
@@ -723,28 +252,9 @@ for instructions on developing Toolbox SDKs.
### Team
Team `@googleapis/senseai-eco` has been set as
Team, `@googleapis/senseai-eco`, has been set as
[CODEOWNERS](.github/CODEOWNERS). The GitHub TeamSync tool is used to create
this team from MDB Group, `senseai-eco`. Additionally, database-specific GitHub
teams (e.g., `@googleapis/toolbox-alloydb`) have been created from MDB groups to
manage code ownership and review for individual database products.
### Issue/PR Triage and SLO
After an issue is created, maintainers will assign the following labels:
* `Priority` (defaulted to P0)
* `Type` (if applicable)
* `Product` (if applicable)
All incoming issues and PRs will follow the following SLO:
| Type | Priority | Objective |
|-----------------|----------|------------------------------------------------------------------------|
| Feature Request | P0 | Must respond within **5 days** |
| Process | P0 | Must respond within **5 days** |
| Bugs | P0 | Must respond within **5 days**, and resolve/closure within **14 days** |
| Bugs | P1 | Must respond within **7 days**, and resolve/closure within **90 days** |
| Bugs            | P2       | Must respond within **30 days**                                          |
_Types that are not listed in the table do not adhere to any SLO._
this team from MDB Group, `senseai-eco`.
### Releasing
@@ -842,12 +352,10 @@ Trigger pull request tests for external contributors by:
## Repo Setup & Automation
* .github/blunderbuss.yml - Auto-assign issues and PRs from GitHub teams. Use a
product label to assign to a product-specific team member.
* .github/blunderbuss.yml - Auto-assign issues and PRs from GitHub teams
* .github/renovate.json5 - Tooling for dependency updates. Dependabot is built
into the GitHub repo for GitHub security warnings
* go/github-issue-mirror - GitHub issues are automatically mirrored into buganizer
* (Suspended) .github/sync-repo-settings.yaml - configure repo settings
* .github/release-please.yml - Creates GitHub releases
* .github/ISSUE_TEMPLATE - templates for GitHub issues

View File

@@ -11,14 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
FROM --platform=$BUILDPLATFORM golang:1 AS build
# Install Zig for CGO cross-compilation
RUN apt-get update && apt-get install -y xz-utils
RUN curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz && \
mkdir -p /zig && \
tar -xf zig.tar.xz -C /zig --strip-components=1 && \
rm zig.tar.xz
# Use the latest stable golang 1.x to compile to a binary
FROM --platform=$BUILDPLATFORM golang:1 AS build
WORKDIR /go/src/genai-toolbox
COPY . .
@@ -29,27 +24,14 @@ ARG BUILD_TYPE="container.dev"
ARG COMMIT_SHA=""
RUN go get ./...
RUN export ZIG_TARGET="" && \
case "${TARGETARCH}" in \
("amd64") ZIG_TARGET="x86_64-linux-gnu" ;; \
("arm64") ZIG_TARGET="aarch64-linux-gnu" ;; \
(*) echo "Unsupported architecture: ${TARGETARCH}" && exit 1 ;; \
esac && \
CGO_ENABLED=1 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
CC="/zig/zig cc -target ${ZIG_TARGET}" \
CXX="/zig/zig c++ -target ${ZIG_TARGET}" \
go build \
-ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}" \
-o genai-toolbox .
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=container.${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}"
# Final Stage
FROM gcr.io/distroless/cc-debian12:nonroot
FROM gcr.io/distroless/static:nonroot
WORKDIR /app
COPY --from=build --chown=nonroot /go/src/genai-toolbox/genai-toolbox /toolbox
USER nonroot
LABEL io.modelcontextprotocol.server.name="io.github.googleapis/genai-toolbox"
ENTRYPOINT ["/toolbox"]

243
GEMINI.md
View File

@@ -1,243 +0,0 @@
# MCP Toolbox Context & Style Guide
This file (symlinked as `CLAUDE.md`, `AGENTS.md`, and `.gemini/styleguide.md`) provides context and guidelines for AI agents working on the MCP Toolbox for Databases project. It summarizes key information from `CONTRIBUTING.md` and `DEVELOPER.md`.
## Project Overview
**MCP Toolbox for Databases** is a Go-based project designed to provide Model Context Protocol (MCP) tools for various data sources and services. It allows Large Language Models (LLMs) to interact with databases and other tools safely and efficiently.
## Tech Stack
- **Language:** Go (1.23+)
- **Documentation:** Hugo (Extended Edition v0.146.0+)
- **Containerization:** Docker
- **CI/CD:** GitHub Actions, Google Cloud Build
- **Linting:** `golangci-lint`
## Key Directories
- `cmd/`: Application entry points.
- `internal/sources/`: Implementations of database sources (e.g., Postgres, BigQuery).
- `internal/tools/`: Implementations of specific tools for each source.
- `tests/`: Integration tests.
- `docs/en`: Project documentation. Separated logically into:
- `documentation/`: Documentation and concepts (Section I).
- `integrations/`: Reference architectures for DB connectivity and tools (Section II).
- `samples/`: Tutorials and code samples (Section III).
- `reference/`: CLI info and FAQs (Section IV).
## Development Workflow
### Prerequisites
- Go 1.23 or later.
- Docker (for building container images and running some tests).
- Access to necessary Google Cloud resources for integration testing (if applicable).
### Building and Running
1. **Build Binary:** `go build -o toolbox`
2. **Run Server:** `go run .` (Listens on port 5000 by default)
3. **Run with Help:** `go run . --help`
4. **Test Endpoint:** `curl http://127.0.0.1:5000`
### Testing
- **Unit Tests:** `go test -race -v ./cmd/... ./internal/...`
- **Integration Tests:**
- Run specific source tests: `go test -race -v ./tests/<source_dir>`
- Example: `go test -race -v ./tests/alloydbpg`
- Add new sources to `.ci/integration.cloudbuild.yaml`
- **Linting:** `golangci-lint run --fix`
## Developing Documentation
### Prerequisites
- Hugo (Extended Edition v0.146.0+)
- Node.js (for `npm ci`)
### Running Local Server
1. Navigate to `.hugo` directory: `cd .hugo`
2. Install dependencies: `npm ci`
3. **Generate Search Index:** Because Pagefind requires physical files, `hugo server` alone will not populate the search bar. Build the local index first (using the development environment to block analytics) by running:
`hugo --environment development && npx pagefind --site public --output-path static/pagefind`
4. Start server: `hugo server`
### Versioning Workflows
Documentation builds automatically generate standard HTML alongside AI-friendly text files (`llms.txt` and `llms-full.txt`).
There are 6 workflows in total, handling parallel deployments to both GitHub Pages and Cloudflare Pages. **All deployment workflows automatically execute `npx pagefind --site public` to generate version-scoped search indexes.**
1. **Deploy In-development docs**: Commits merged to `main` deploy to the `/dev/` path. Automatically defaults to version `Dev`.
2. **Deploy Versioned Docs**: New GitHub releases deploy to `/<version>/` and the root path. The release tag is automatically injected into the build as the documentation version. *(Note: Developers must manually add the new version to the `[[params.versions]]` dropdown array in `hugo.toml` prior to merging a release PR).*
3. **Deploy Previous Version Docs**: A manual workflow to rebuild older versions by explicitly passing the target tag via the GitHub Actions UI.
## Coding Conventions
### Tool Naming
- **Tool Name:** `snake_case` (e.g., `list_collections`, `run_query`).
- Do *not* include the product name (e.g., avoid `firestore_list_collections`).
- **Tool Type:** `kebab-case` (e.g., `firestore-list-collections`).
- *Must* include the product name.
### Branching and Commits
- **Branch Naming:** `feat/`, `fix/`, `docs/`, `chore/` (e.g., `feat/add-gemini-md`).
- **Commit Messages:** [Conventional Commits](https://www.conventionalcommits.org/) format.
- Format: `<type>(<scope>): <description>`
- Example: `feat(source/postgres): add new connection option`
- Types: `feat`, `fix`, `docs`, `chore`, `test`, `ci`, `refactor`, `revert`, `style`.
### PR Title Format
Format: `<type>[optional scope]: <description>`
- **Example:** `feat(source/postgres): add support for "new-field" field`
- **Example (Breaking Change):** `fix(tool/sql)!: change default parameter value`
#### Types
| Type | Description | Version change affected |
| :--- | :--- | :--- |
| **BREAKING CHANGE** | Anything with this type or a `!` after the type/scope introduces a breaking API change. E.g. `fix!: description` or `feat!: description`. | major |
| **feat** | Adding a new feature to the codebase. | minor |
| **fix** | Fixing a bug or typo in the codebase. | patch |
| **ci** | Changes made to the continuous integration configuration files or scripts (usually the yml and other configuration files). | n/a |
| **docs** | Documentation-related PRs, including fixes to docs. | n/a |
| **chore** | Other small tasks or updates that don't fall into any of the types above. | n/a |
| **perf** | Changes to source code that improve performance metrics. | n/a |
| **refactor** | Changes to source code that, unlike feat, break no tests and lose no line coverage. | n/a |
| **revert** | Revert changes made in another commit. | n/a |
| **style** | Changes to source code consisting only of formatting and whitespace updates — in other words, anything a code formatter or linter changes. | n/a |
| **test** | Changes made to test files. | n/a |
| **build** | Changes related to the project build or its dependencies. | n/a |
#### Scopes
PRs addressing a specific source or tool should **always** add the source or tool name as scope.
The scope is formatted as `<type>/<kind>`. Common scopes include:
- `source/postgres`, `source/cloudsql-mysql`
- `tool/mssql-sql`, `tool/list-tables`
- `auth/google`
**Multiple Scopes:**
- If the PR covers multiple scopes of the same kind, separate them with a comma: `feat(source/postgres,source/alloydbpg): ...`.
- If the PR covers multiple scope types (e.g., adding a new database source and tool), disregard the scope type prefix: `feat(new-db): adding support for new-db source and tool`.
#### PR Description
Every PR must include a description that follows the repository's template:
**1. Description**
A concise description of the changes (bug or feature), its impact, and a summary of the solution.
**2. PR Checklist**
- [ ] Make sure to open an issue as a bug/issue before writing your code!
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change
**3. Issue Reference**
Use the format: `Fixes #<issue_number> 🦕`
## Adding New Features
### Adding a New Data Source
1. Create a new directory: `internal/sources/<newdb>`.
2. Define `Config` and `Source` structs in `internal/sources/<newdb>/<newdb>.go`.
3. Implement `SourceConfig` interface (`SourceConfigType`, `Initialize`).
4. Implement `Source` interface (`SourceType`).
5. Implement `init()` to register the source.
6. Add unit tests in `internal/sources/<newdb>/<newdb>_test.go`.
### Adding a New Tool
1. Create a new directory: `internal/tools/<newdb>/<toolname>`.
2. Define `Config` and `Tool` structs.
3. Implement `ToolConfig` interface (`ToolConfigType`, `Initialize`).
4. Implement `Tool` interface (`Invoke`, `ParseParams`, `Manifest`, `McpManifest`, `Authorized`).
5. Implement `init()` to register the tool.
6. Add unit tests.
### Adding Documentation
- **For a new source:** Add source documentation to `docs/en/integrations/<source_name>/source.md`. Ensure the root `_index.md` file contains **strictly only frontmatter** and no markdown body text.
- **For a new native tool:** Add tool documentation to `docs/en/integrations/<source_name>/tools/<tool_name>.md`. Ensure the `tools/_index.md` file contains **strictly only frontmatter**.
- **Adding Integration Samples:** Add integration-specific samples to `docs/en/integrations/<source_name>/samples/`. Ensure the `samples/_index.md` file contains **strictly only frontmatter**.
- **Tool Inheritance (Shared Tools):** Managed databases (e.g., Cloud SQL Postgres) that use the tools of their underlying engine (e.g., Postgres) map their inherited tools by utilizing the `shared_tools` frontmatter parameter inside their `tools/_index.md` file. This file must contain only frontmatter.
- **New Top-Level Directories:** If adding a completely new top-level section to the documentation site, you must update the "Diátaxis Narrative Framework" section inside both `.hugo/layouts/index.llms.txt` and `.hugo/layouts/index.llms-full.txt` to keep the AI context synced with the site structure.
#### Integration Documentation Rules
When generating or editing documentation for this repository, you must strictly adhere to the following CI-enforced rules. Failure to do so will break the build.
##### Source Page Constraints (`integrations/**/source.md`)
1. **File Naming:** The primary connection guide for a source must be named `source.md`. Use `_index.md` solely as an empty structural folder wrapper containing **only YAML frontmatter**.
2. **LinkTitle:** The linkTitle has to be set to the string `Source` always.
3. **Title Convention:** The YAML frontmatter `title` must always end with "Source" (e.g., `title: "Postgres Source"`).
4. **No H1 Tags:** Never generate H1 (`#`) headings in the markdown body.
5. **Strict H2 Ordering:** You must use the following H2 (`##`) headings in this exact sequence.
* `## About` (Required)
* `## Available Tools` (Optional)
* `## Requirements` (Optional)
* `## Example` (Required)
* `## Reference` (Required)
* `## Advanced Usage` (Optional)
* `## Troubleshooting` (Optional)
* `## Additional Resources` (Optional)
6. **Shortcode Placement:** If you generate the `## Available Tools` section, you must include the `{{< list-tools >}}` shortcode beneath it.
##### Tool Page Constraints (`integrations/**/tools/*.md`)
1. **Location:** All native tools must reside inside a nested `tools/` subdirectory. The `tools/` directory must contain an `_index.md` file consisting **strictly of frontmatter**.
2. **Title Convention:** The YAML frontmatter `title` must always end with "Tool" (e.g., `title: "Execute SQL Tool"`).
3. **No H1 Tags:** Never generate H1 (`#`) headings in the markdown body.
4. **Strict H2 Ordering:** You must use the following H2 (`##`) headings in this exact sequence.
* `## About` (Required)
* `## Compatible Sources` (Optional)
* `## Requirements` (Optional)
* `## Parameters` (Optional)
* `## Example` (Required)
* `## Output Format` (Optional)
* `## Reference` (Optional)
* `## Advanced Usage` (Optional)
* `## Troubleshooting` (Optional)
* `## Additional Resources` (Optional)
5. **Shortcode Placement:** If you generate the `## Compatible Sources` section, you must include the `{{< compatible-sources >}}` shortcode beneath it.
##### Samples Architecture Constraints
Sample code is aggregated visually in the UI via the Samples section, but the physical markdown files are distributed logically based on their scope.
1. **Quickstarts:** `docs/en/documentation/getting-started/`
2. **Integration-Specific Samples:** `docs/en/integrations/<source_name>/samples/`. (The `samples/_index.md` wrapper must contain **strictly only frontmatter**).
3. **General/Cross-Category Samples:** `docs/en/samples/`
##### Samples Maintenance Rules
1. **Filtering:** Always include `sample_filters` in the frontmatter. Use specific tags for:
* Data Source (e.g., `bigquery`, `alloydb`)
* Language (e.g., `python`, `js`, `go`)
* Tool Type (e.g., `mcp`, `sdk`)
2. **Metadata:** Ensure `is_sample: true` is present to prevent the sample from being excluded from the Samples Gallery.
##### Prebuilt Config Constraints (`integrations/**/prebuilt-configs/*.md`)
1. **Naming & Path:** All prebuilt config docs must reside in `prebuilt-configs/`.
2. **Shortcode Requirement:** The main `documentation/configuration/prebuilt-configs/_index.md` page uses the `{{< list-prebuilt-configs >}}` shortcode, which only detects directories named exactly `prebuilt-configs`.
3. **YAML Mapping:** Always verify the `kind` of the data source in `internal/prebuiltconfigs/tools/` before choosing the integration folder.
##### Asset Constraints (`docs/`)
1. **File Size Limits:** Never add files larger than 24MB to the `docs/` directory.

View File

@@ -1,247 +0,0 @@
This document helps you find and install the right Gemini CLI extension to
interact with your databases.
## How to Install an Extension
To install any of the extensions listed below, use the `gemini extensions
install` command followed by the extension's GitHub repository URL.
For complete instructions on finding, installing, and managing extensions,
please see the [official Gemini CLI extensions
documentation](https://github.com/google-gemini/gemini-cli/blob/main/docs/extensions/index.md).
**Example Installation Command:**
```bash
gemini extensions install https://github.com/gemini-cli-extensions/EXTENSION_NAME
```
Make sure the user knows:
* These commands are not supported from within the CLI
* These commands will only be reflected in active CLI sessions on restart
* Extensions require Application Default Credentials in your environment. See
[Set up ADC for a local development
environment](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
to learn how you can provide either your user credentials or service account
credentials to ADC in a local development environment.
* Most extensions require you to set environment variables to connect to a
database. If there is a link provided for the configuration, fetch the web
page and return the configuration.
-----
## Find Your Database Extension
Find your database or service in the list below to get the correct installation
command.
**Note on Observability:** Extensions with `-observability` in their name are
designed to help you understand the health and performance of your database
instances, often by analyzing metrics and logs.
### Google Cloud Managed Databases
#### BigQuery
* For data analytics and querying:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/bigquery-data-analytics
```
Configuration:
https://github.com/gemini-cli-extensions/bigquery-data-analytics/tree/main?tab=readme-ov-file#configuration
* For conversational analytics (using natural language):
```bash
gemini extensions install https://github.com/gemini-cli-extensions/bigquery-conversational-analytics
```
Configuration: https://github.com/gemini-cli-extensions/bigquery-conversational-analytics/tree/main?tab=readme-ov-file#configuration
#### Cloud SQL for MySQL
* Main Extension:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-mysql
```
Configuration:
https://github.com/gemini-cli-extensions/cloud-sql-mysql/tree/main?tab=readme-ov-file#configuration
* Observability:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-mysql-observability
```
If you are looking for self-hosted MySQL, consider the `mysql` extension.
#### Cloud SQL for PostgreSQL
* Main Extension:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-postgresql
```
Configuration:
https://github.com/gemini-cli-extensions/cloud-sql-postgresql/tree/main?tab=readme-ov-file#configuration
* Observability:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-postgresql-observability
```
If you are looking for other PostgreSQL options, consider the `postgres`
extension for self-hosted instances, or the `alloydb` extension for AlloyDB
for PostgreSQL.
#### Cloud SQL for SQL Server
* Main Extension:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-sqlserver
```
Configuration:
https://github.com/gemini-cli-extensions/cloud-sql-sqlserver/tree/main?tab=readme-ov-file#configuration
* Observability:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-sqlserver-observability
```
If you are looking for self-hosted SQL Server, consider the `sql-server`
extension.
#### AlloyDB for PostgreSQL
* Main Extension:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/alloydb
```
Configuration:
https://github.com/gemini-cli-extensions/alloydb/tree/main?tab=readme-ov-file#configuration
* Observability:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/alloydb-observability
```
If you are looking for other PostgreSQL options, consider the `postgres`
extension for self-hosted instances, or the `cloud-sql-postgresql` extension
for Cloud SQL for PostgreSQL.
#### Spanner
* For querying Spanner databases:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/spanner
```
Configuration:
https://github.com/gemini-cli-extensions/spanner/tree/main?tab=readme-ov-file#configuration
#### Firestore
* For querying Firestore in Native Mode:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/firestore-native
```
Configuration:
https://github.com/gemini-cli-extensions/firestore-native/tree/main?tab=readme-ov-file#configuration
### Other Google Cloud Data Services
#### Dataplex
* For interacting with Dataplex data lakes and assets:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/dataplex
```
Configuration:
https://github.com/gemini-cli-extensions/dataplex/tree/main?tab=readme-ov-file#configuration
#### Looker
* For querying Looker instances:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/looker
```
Configuration:
https://github.com/gemini-cli-extensions/looker/tree/main?tab=readme-ov-file#configuration
### Other Database Engines
These extensions are for connecting to database instances not managed by Cloud
SQL (e.g., self-hosted on-prem, on a VM, or in another cloud).
* MySQL:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/mysql
```
Configuration:
https://github.com/gemini-cli-extensions/mysql/tree/main?tab=readme-ov-file#configuration
If you are looking for Google Cloud managed MySQL, consider the
`cloud-sql-mysql` extension.
* PostgreSQL:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/postgres
```
Configuration:
https://github.com/gemini-cli-extensions/postgres/tree/main?tab=readme-ov-file#configuration
If you are looking for Google Cloud managed PostgreSQL, consider the
`cloud-sql-postgresql` or `alloydb` extensions.
* SQL Server:
```bash
gemini extensions install https://github.com/gemini-cli-extensions/sql-server
```
Configuration:
https://github.com/gemini-cli-extensions/sql-server/tree/main?tab=readme-ov-file#configuration
If you are looking for Google Cloud managed SQL Server, consider the
`cloud-sql-sqlserver` extension.
### Custom Tools
#### MCP Toolbox
* For connecting to MCP Toolbox servers:
This extension can be used with any Google Cloud database to build custom
tools. For more information, see the [MCP Toolbox
documentation](https://googleapis.github.io/genai-toolbox/getting-started/introduction/).
```bash
gemini extensions install https://github.com/gemini-cli-extensions/mcp-toolbox
```
Configuration:
https://github.com/gemini-cli-extensions/mcp-toolbox/tree/main?tab=readme-ov-file#configuration

659
README.md
View File

@@ -1,222 +1,108 @@
<div align="center">
![logo](./logo.png)
# MCP Toolbox for Databases
<a href="https://trendshift.io/repositories/13019" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13019" alt="googleapis%2Fgenai-toolbox | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
[![Go Report Card](https://goreportcard.com/badge/github.com/googleapis/genai-toolbox)](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
[![License: Apache
2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![Docs](https://img.shields.io/badge/Docs-MCP_Toolbox-blue)](https://googleapis.github.io/genai-toolbox/)
[![Docs](https://img.shields.io/badge/docs-MCP_Toolbox-blue)](https://googleapis.github.io/genai-toolbox/)
[![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?style=flat&logo=discord&logoColor=white)](https://discord.gg/Dmm69peqjh)
[![Medium](https://img.shields.io/badge/Medium-12100E?style=flat&logo=medium&logoColor=white)](https://medium.com/@mcp_toolbox)
[![Python SDK](https://img.shields.io/pypi/v/toolbox-core?logo=python&logoColor=white&label=Python%20SDK)](https://pypi.org/project/toolbox-core/)
[![JS/TS SDK](https://img.shields.io/npm/v/@toolbox-sdk/core?logo=javascript&logoColor=white&label=JS%20SDK)](https://www.npmjs.com/package/@toolbox-sdk/core)
[![Go SDK](https://img.shields.io/github/v/release/googleapis/mcp-toolbox-sdk-go?logo=go&logoColor=white&label=Go%20SDK)](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go)
[![Java SDK](https://img.shields.io/maven-central/v/com.google.cloud.mcp/mcp-toolbox-sdk-java?logo=apache-maven&logoColor=white&label=Java%20SDK)](https://mvnrepository.com/artifact/com.google.cloud.mcp/mcp-toolbox-sdk-java)
</div>
MCP Toolbox for Databases is an open source Model Context Protocol (MCP) server that connects your AI agents, IDEs, and applications directly to your enterprise databases.
<p align="center">
<img src="docs/en/documentation/introduction/architecture.png" alt="architecture" width="50%"/>
</p>
It serves a **dual purpose**:
1. **Ready-to-use MCP Server (Build-Time):** Instantly connect Gemini CLI, Google Antigravity, Claude Code, Codex, or other MCP clients to your databases using our *prebuilt generic tools*. Talk to your data, explore schemas, and generate code without writing boilerplate.
2. **Custom Tools Framework (Run-Time):** A robust framework to build specialized, highly secure AI tools for your production agents. Define structured queries, semantic search, and NL2SQL capabilities safely and easily.
This README provides a brief overview. For comprehensive details, see the [full documentation](https://googleapis.github.io/genai-toolbox/).
[![Go Report Card](https://goreportcard.com/badge/github.com/googleapis/genai-toolbox)](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
> [!NOTE]
> This solution was originally named “Gen AI Toolbox for Databases” (github.com/googleapis/genai-toolbox) as its initial development predated MCP, but was renamed to align with MCP compatibility.
> MCP Toolbox for Databases is currently in beta, and may see breaking
> changes until the first stable release (v1.0).
MCP Toolbox for Databases is an open source MCP server for databases. It enables
you to develop tools easier, faster, and more securely by handling the complexities
such as connection pooling, authentication, and more.
This README provides a brief overview. For comprehensive details, see the [full
documentation](https://googleapis.github.io/genai-toolbox/).
> [!NOTE]
> This solution was originally named “Gen AI Toolbox for Databases” as
> its initial development predated MCP, but was renamed to align with recently
> added MCP compatibility.
<!-- TOC ignore:true -->
## Table of Contents
- [Why MCP Toolbox?](#why-mcp-toolbox)
- [Quick Start: Prebuilt Tools](#quick-start-prebuilt-tools)
- [Quick Start: Custom Tools](#quick-start-custom-tools)
- [Install & Run the Toolbox server](#install--run-the-toolbox-server)
- [Connect to Toolbox](#connect-to-toolbox)
- [MCP Client](#mcp-client)
- [Toolbox SDKs: Integrate with your Application](#toolbox-sdks-integrate-with-your-application)
- [Additional Features](#additional-features)
<!-- TOC -->
- [Why Toolbox?](#why-toolbox)
- [General Architecture](#general-architecture)
- [Getting Started](#getting-started)
- [Installing the server](#installing-the-server)
- [Running the server](#running-the-server)
- [Homebrew Users](#homebrew-users)
- [Integrating your application](#integrating-your-application)
- [Configuration](#configuration)
- [Sources](#sources)
- [Tools](#tools)
- [Toolsets](#toolsets)
- [Versioning](#versioning)
- [Pre-1.0.0 Versioning](#pre-100-versioning)
- [Post-1.0.0 Versioning](#post-100-versioning)
- [Contributing](#contributing)
- [Community](#community)
---
<!-- /TOC -->
## Why MCP Toolbox?
## Why Toolbox?
- **Out-of-the-Box Database Access:** Prebuilt generic tools for instant data exploration (e.g., `list_tables`, `execute_sql`) directly from your IDE or CLI.
- **Custom Tools Framework:** Build production-ready tools with your own predefined logic, ensuring safety through Restricted Access, Structured Queries, and Semantic Search.
- **Simplified Development:** Integrate tools into your Agent Development Kit (ADK), LangChain, LlamaIndex, or custom agents in less than 10 lines of code.
- **Better Performance:** Handles connection pooling, integrated auth (IAM), and end-to-end observability (OpenTelemetry) out of the box.
- **Enhanced Security**: Integrated authentication for more secure access to your data.
- **End-to-end Observability**: Out of the box metrics and tracing with built-in support for OpenTelemetry.
Toolbox helps you build Gen AI tools that let your agents access data in your
database. Toolbox provides:
---
- **Simplified development**: Integrate tools to your agent in less than 10
lines of code, reuse tools between multiple agents or frameworks, and deploy
new versions of tools more easily.
- **Better performance**: Best practices such as connection pooling,
authentication, and more.
- **Enhanced security**: Integrated auth for more secure access to your data
- **End-to-end observability**: Out of the box metrics and tracing with built-in
support for OpenTelemetry.
## Quick Start: Prebuilt Tools
**⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
Stop context-switching and let your AI assistant become a true co-developer. By connecting your IDE to your databases with MCP Toolbox, you can query your data in plain English, automate schema discovery and management, and generate database-aware code.
Stop context-switching and let your AI assistant become a true co-developer. By
[connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
delegate complex and time-consuming database tasks, allowing you to build faster
and focus on what matters. This isn't just about code completion; it's about
giving your AI the context it needs to handle the entire development lifecycle.
You can use the Toolbox in any MCP-compatible IDE or client (e.g., Gemini CLI, Google Antigravity, Claude Code, Codex, etc.) by configuring the MCP server.
Here's how it will save you time:
**Prebuilt tools are also conveniently available via the [Google Antigravity MCP Store](https://antigravity.google/docs/mcp) with a simple click-to-install experience.**
- **Query in Plain English**: Interact with your data using natural language
right from your IDE. Ask complex questions like, *"How many orders were
delivered in 2024, and what items were in them?"* without writing any SQL.
- **Automate Database Management**: Simply describe your data needs, and let the
AI assistant manage your database for you. It can handle generating queries,
creating tables, adding indexes, and more.
- **Generate Context-Aware Code**: Empower your AI assistant to generate
application code and tests with a deep understanding of your real-time
database schema. This accelerates the development cycle by ensuring the
generated code is directly usable.
- **Slash Development Overhead**: Radically reduce the time spent on manual
setup and boilerplate. MCP Toolbox helps streamline lengthy database
configurations, repetitive code, and error-prone schema migrations.
1. Add the following to your client's MCP configuration file (usually `mcp.json` or `claude_desktop_config.json`):
Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
```json
{
"mcpServers": {
"toolbox-postgres": {
"command": "npx",
"args": [
"-y",
"@toolbox-sdk/server",
"--prebuilt=postgres"
]
}
}
}
```
[connect-ide]: https://googleapis.github.io/genai-toolbox/how-to/connect-ide/
2. Set the appropriate environment variables to connect, see the [Prebuilt Tools Reference](https://googleapis.github.io/genai-toolbox/reference/prebuilt-tools/).
## General Architecture
When you run Toolbox with a `--prebuilt=<database>` flag, you instantly get access to standard tools to interact with that database.
Toolbox sits between your application's orchestration framework and your
database, providing a control plane that is used to modify, distribute, or
invoke tools. It simplifies the management of your tools by providing you with a
centralized location to store and update tools, allowing you to share tools
between agents and applications and update those tools without necessarily
redeploying your application.
Supported databases currently include:
- **Google Cloud:** AlloyDB, BigQuery, Cloud SQL (PostgreSQL, MySQL, SQL Server), Spanner, Firestore, Dataplex
- **Other Databases:** PostgreSQL, MySQL, SQL Server, Oracle, MongoDB, Redis, Elasticsearch, CockroachDB, ClickHouse, Couchbase, Neo4j, Snowflake, Trino, and more.
![architecture](./docs/en/getting-started/introduction/architecture.png)
For a full list of available tools and their capabilities across all supported databases, see the [Prebuilt Tools Reference](https://googleapis.github.io/genai-toolbox/reference/prebuilt-tools/).
## Getting Started
*See the [Install & Run the Toolbox server](#install--run-the-toolbox-server) section for different execution methods like Docker or binaries.*
> [!TIP]
> For users looking for a managed solution, [Google Cloud MCP Servers](https://cloud.google.com/blog/products/databases/managed-mcp-servers-for-google-cloud-databases)
> provide a managed MCP experience with prebuilt tools; you can [learn more about the differences here](https://mcp-toolbox.dev/dev/reference/faq/).
---
## Quick Start: Custom Tools
Toolbox can also be used as a framework for customized tools.
The primary way to configure Toolbox is through the `tools.yaml` file. If you
have multiple files, you can tell Toolbox which to load with the `--config
tools.yaml` flag.
You can find more detailed reference documentation to all resource types in the
[Resources](https://googleapis.github.io/genai-toolbox/resources/).
### Sources
The `sources` section of your `tools.yaml` defines what data sources your
Toolbox should have access to. Most tools will have at least one source to
execute against.
```yaml
kind: source
name: my-pg-source
type: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
```
For more details on configuring different types of sources, see the
[Sources](https://googleapis.github.io/genai-toolbox/resources/sources).
### Tools
The `tools` section of a `tools.yaml` defines the actions an agent can take: what
type of tool it is, which source(s) it affects, what parameters it uses, etc.
```yaml
kind: tool
name: search-hotels-by-name
type: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```
For more details on configuring different types of tools, see the
[Tools](https://googleapis.github.io/genai-toolbox/resources/tools).
### Toolsets
The `toolsets` section of your `tools.yaml` allows you to define groups of tools
that you want to be able to load together. This can be useful for defining
different groups based on agent or application.
```yaml
kind: toolset
name: my_first_toolset
tools:
- my_first_tool
- my_second_tool
---
kind: toolset
name: my_second_toolset
tools:
- my_second_tool
- my_third_tool
```
### Prompts
The `prompts` section of a `tools.yaml` defines prompts that can be used for
interactions with LLMs.
```yaml
kind: prompt
name: code_review
description: "Asks the LLM to analyze code quality and suggest improvements."
messages:
- content: >
Please review the following code for quality, correctness,
and potential improvements: \n\n{{.code}}
arguments:
- name: "code"
description: "The code to review"
```
For more details on configuring prompts, see the
[Prompts](https://googleapis.github.io/genai-toolbox/resources/prompts).
---
## Install & Run the Toolbox server
You can run Toolbox directly with a [configuration file](#quick-start-custom-tools):
```sh
npx @toolbox-sdk/server --config tools.yaml
```
This runs the latest version of the Toolbox server with your configuration file.
> [!NOTE]
> This method is optimized for convenience rather than performance.
> For a more standard and reliable installation, please use the binary
> or container image as described in [Install & Run the Toolbox server](#install--run-the-toolbox-server).
### Install Toolbox
### Installing the server
For the latest version, check the [releases page][releases] and use the
following instructions for your OS and CPU architecture.
@@ -229,69 +115,13 @@ following instructions for your OS and CPU architecture.
To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
> <details>
> <summary>Linux (AMD64)</summary>
>
> To install Toolbox as a binary on Linux (AMD64):
>
> ```sh
> # see releases page for other versions
> export VERSION=0.31.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```
>
> </details>
> <details>
> <summary>macOS (Apple Silicon)</summary>
>
> To install Toolbox as a binary on macOS (Apple Silicon):
>
> ```sh
> # see releases page for other versions
> export VERSION=0.31.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```
>
> </details>
> <details>
> <summary>macOS (Intel)</summary>
>
> To install Toolbox as a binary on macOS (Intel):
>
> ```sh
> # see releases page for other versions
> export VERSION=0.31.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```
>
> </details>
> <details>
> <summary>Windows (Command Prompt)</summary>
>
> To install Toolbox as a binary on Windows (Command Prompt):
>
> ```cmd
> :: see releases page for other versions
> set VERSION=0.31.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
> </details>
> <details>
> <summary>Windows (PowerShell)</summary>
>
> To install Toolbox as a binary on Windows (PowerShell):
>
> ```powershell
> # see releases page for other versions
> $VERSION = "0.31.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> ```
>
> </details>
```sh
# see releases page for other versions
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
</details>
<details>
@@ -300,7 +130,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.31.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -324,36 +154,15 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.31.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```
<!-- {x-release-please-end} -->
</details>
<details>
<summary>Gemini CLI</summary>
Check out the [Gemini CLI extensions](https://geminicli.com/extensions/) to install prebuilt tools for specific databases like AlloyDB, BigQuery, and Cloud SQL directly into Gemini CLI.
```sh
# Install Gemini CLI
npm install -g @google/gemini-cli
# Install the extension
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-postgres
# Run Gemini CLI
gemini
```
### Running the server
Interact with your custom tools using natural language through the Gemini CLI.
```sh
# Install the extension
gemini extensions install https://github.com/gemini-cli-extensions/mcp-toolbox
```
</details>
### Run Toolbox
[Configure](#quick-start-custom-tools) a `tools.yaml` to define your tools, and then
[Configure](#configuration) a `tools.yaml` to define your tools, and then
execute `toolbox` to start the server:
<details open>
@@ -362,12 +171,11 @@ execute `toolbox` to start the server:
To run Toolbox from binary:
```sh
./toolbox --config "tools.yaml"
./toolbox --tools-file "tools.yaml"
```
> ⓘ Note
> Toolbox enables dynamic reloading by default. To disable, use the
> `--disable-reload` flag.
**NOTE:**
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
</details>
@@ -375,19 +183,18 @@ To run Toolbox from binary:
<summary>Container image</summary>
To run the server after pulling the [container image](#install-toolbox):
To run the server after pulling the [container image](#installing-the-server):
```sh
export VERSION=0.24.0 # Use the version you pulled
export VERSION=0.11.0 # Use the version you pulled
docker run -p 5000:5000 \
-v $(pwd)/tools.yaml:/app/tools.yaml \
us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
--config "/app/tools.yaml"
--tools-file "/app/tools.yaml"
```
> ⓘ Note
> The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps
> the container's port `5000` to your host's port `5000`.
**NOTE:**
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps the container's port `5000` to your host's port `5000`.
</details>
@@ -395,18 +202,14 @@ us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
<summary>Source</summary>
To run the server directly from source, navigate to the project root directory
and run:
To run the server directly from source, navigate to the project root directory and run:
```sh
go run .
```
> ⓘ Note
> This command runs the project from source, and is more suitable for development
> and testing. It does **not** compile a binary into your `$GOPATH`. If you want
> to compile a binary instead, refer to the [Developer
> Documentation](./DEVELOPER.md#building-the-binary).
**NOTE:**
This command runs the project from source, and is more suitable for development and testing. It does **not** compile a binary into your `$GOPATH`. If you want to compile a binary instead, refer to the [Developer Documentation](./DEVELOPER.md#building-the-binary).
</details>
@@ -414,42 +217,14 @@ go run .
<summary>Homebrew</summary>
If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox`
binary is available in your system path. You can start the server with the same
command:
If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox` binary is available in your system path. You can start the server with the same command:
```sh
toolbox --config "tools.yaml"
toolbox --tools-file "tools.yaml"
```
</details>
<details>
<summary>NPM</summary>
To run Toolbox directly without manually downloading the binary (requires Node.js):
```sh
npx @toolbox-sdk/server --config tools.yaml
```
</details>
<details>
<summary>Gemini CLI</summary>
After installing a [Gemini CLI extensions](https://geminicli.com/extensions/), the prebuilt tools will be available during use.
```sh
# Run Gemini CLI
gemini
# List extensions
/extensions list
# List MCP servers
/mcp list
```
</details>
You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).
@@ -457,34 +232,11 @@ For more detailed documentation on deploying to different environments, check
out the resources in the [How-to
section](https://googleapis.github.io/genai-toolbox/how-to/)
---
## Connect to Toolbox
### Integrating your application
Once your Toolbox server is up and running, you can load tools into your MCP-compatible client or
application.
### MCP Client
Add the following configuration to your MCP client configuration:
```json
{
"mcpServers": {
"toolbox": {
"type": "http",
"url": "http://127.0.0.1:5000/mcp",
}
}
}
```
If you would like to connect to a specific toolset, replace url with "http://127.0.0.1:5000/mcp/{toolset_name}".
### Toolbox SDKs: Integrate with your Application
Toolbox Client SDKs provide the easy-to-use building blocks and advanced features for connecting your custom applications to the MCP Toolbox server. See below the list of Client SDKs for using various frameworks:
Once your server is up and running, you can load the tools into your
application. See below the list of Client SDKs for using various frameworks:
<details open>
<summary>Python (<a href="https://github.com/googleapis/mcp-toolbox-sdk-python">Github</a>)</summary>
@@ -688,36 +440,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
```
</details>
<details>
<summary>ADK</summary>
1. Install [Toolbox ADK SDK][toolbox-adk-js]:
```bash
npm install @toolbox-sdk/adk
```
2. Load tools:
```javascript
import { ToolboxClient } from '@toolbox-sdk/adk';
// update the url to point to your server
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
// these tools can be passed to your application!
const tools = await client.loadToolset('toolsetName');
```
For more detailed instructions on using the Toolbox ADK SDK, see the
[project's README][toolbox-adk-js-readme].
[toolbox-adk-js]: https://www.npmjs.com/package/@toolbox-sdk/adk
[toolbox-adk-js-readme]:
https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-adk/README.md
</details>
</details>
</blockquote>
<details>
@@ -725,7 +447,7 @@ For more detailed instructions on using the Toolbox Core SDK, see the
<br>
<blockquote>
<details>
<details open>
<summary>Core</summary>
1. Install [Toolbox Go SDK][toolbox-go]:
@@ -734,7 +456,7 @@ For more detailed instructions on using the Toolbox Core SDK, see the
go get github.com/googleapis/mcp-toolbox-sdk-go
```
2. Load tools:
1. Load tools:
```go
package main
@@ -832,11 +554,13 @@ For more detailed instructions on using the Toolbox Core SDK, see the
package main
import (
"context"
"log"
"encoding/json"
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/invopop/jsonschema"
)
func main() {
@@ -844,7 +568,7 @@ For more detailed instructions on using the Toolbox Core SDK, see the
// Update the url to point to your server
URL := "http://127.0.0.1:5000"
ctx := context.Background()
g := genkit.Init(ctx)
g, err := genkit.Init(ctx)
client, err := core.NewToolboxClient(URL)
@@ -857,7 +581,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
}
log.Printf("Successfully converted tool: %s", genkitTool.Name())
}
```
@@ -966,118 +689,122 @@ For more detailed instructions on using the Toolbox Core SDK, see the
}
```
</details>
<details open>
<summary>ADK Go</summary>
1. Install [Toolbox Go SDK][toolbox-go]:
```bash
go get github.com/googleapis/mcp-toolbox-sdk-go
```
1. Load tools:
```go
package main
import (
	"context"
	"log"

	"github.com/googleapis/mcp-toolbox-sdk-go/tbadk"
)
func main() {
	// Update the url to point to your server
	URL := "http://127.0.0.1:5000"
	ctx := context.Background()
	client, err := tbadk.NewToolboxClient(URL)
	if err != nil {
		log.Fatalln("Could not start Toolbox Client", err)
	}
	// Use this tool with ADK Go
	tool, err := client.LoadTool("toolName", ctx)
	if err != nil {
		log.Fatalln("Could not load Toolbox Tool", err)
	}
	_ = tool
}
```
For more detailed instructions on using the Toolbox Go SDK, see the
[project's README][toolbox-core-go-readme].
</details>
</details>
</blockquote>
</details>
---
## Configuration
## Additional Features
The primary way to configure Toolbox is through the `tools.yaml` file. If you
have multiple files, you can tell toolbox which to load with the `--tools-file
tools.yaml` flag.
### Test tools with the Toolbox UI
You can find more detailed reference documentation to all resource types in the
[Resources](https://googleapis.github.io/genai-toolbox/resources/).
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
tools and toolsets with features such as authorized parameters. To learn more,
visit [Toolbox UI](https://googleapis.github.io/genai-toolbox/how-to/toolbox-ui/).
### Sources
```sh
./toolbox --ui
The `sources` section of your `tools.yaml` defines what data sources your
Toolbox should have access to. Most tools will have at least one source to
execute against.
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
```
### Telemetry
For more details on configuring different types of sources, see the
[Sources](https://googleapis.github.io/genai-toolbox/resources/sources).
Toolbox emits traces and metrics via OpenTelemetry. Use `--telemetry-otlp=<endpoint>`
to export to any OTLP-compatible backend like Google Cloud Monitoring, Agnost AI, or
others. See the [telemetry docs](https://googleapis.github.io/genai-toolbox/how-to/export_telemetry/) for details.
### Tools
### Generate Agent Skills
The `tools` section of a `tools.yaml` define the actions an agent can take: what
kind of tool it is, which source(s) it affects, what parameters it uses, etc.
The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill** compatible with the [Agent Skill specification](https://agentskills.io/specification). This is useful for distributing tools as portable skill packages.
```bash
toolbox --config tools.yaml skills-generate \
--name "my-skill" \
--toolset "my_toolset" \
--description "A skill containing multiple tools"
```yaml
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```
Once generated, you can install the skill into the Gemini CLI:
For more details on configuring different types of tools, see the
[Tools](https://googleapis.github.io/genai-toolbox/resources/tools).
```bash
gemini skills install ./skills/my-skill
### Toolsets
The `toolsets` section of your `tools.yaml` allows you to define groups of tools
that you want to be able to load together. This can be useful for defining
different groups based on agent or application.
```yaml
toolsets:
my_first_toolset:
- my_first_tool
- my_second_tool
my_second_toolset:
- my_second_tool
- my_third_tool
```
For more details, see the [Generate Agent Skills guide](https://googleapis.github.io/genai-toolbox/how-to/generate_skill/).
You can load toolsets by name:
---
```python
# This will load all tools
all_tools = client.load_toolset()
# This will only load the tools listed in 'my_second_toolset'
my_second_toolset = client.load_toolset("my_second_toolset")
```
## Versioning
MCP Toolbox for Databases follows [Semantic Versioning](https://semver.org/).
This project uses [semantic versioning](https://semver.org/) (`MAJOR.MINOR.PATCH`).
Since the project is in a pre-release stage (version `0.x.y`), we follow the
standard conventions for initial development:
The Public API includes the Toolbox Server (CLI, configuration manifests, and pre-built toolsets) and the Client SDKs.
### Pre-1.0.0 Versioning
While the major version is `0`, the public API should be considered unstable.
The version will be incremented as follows:
- **Major versions** are incremented for breaking changes, such as incompatible CLI or manifest changes.
- **Minor versions** are incremented for new features, including modifications to pre-built toolsets or beta features.
- **Patch versions** are incremented for backward-compatible bug fixes.
- **`0.MINOR.PATCH`**: The **MINOR** version is incremented when we add
new functionality or make breaking, incompatible API changes.
- **`0.MINOR.PATCH`**: The **PATCH** version is incremented for
backward-compatible bug fixes.
For more details, see our [Full Versioning Policy](https://googleapis.github.io/genai-toolbox/about/versioning/).
### Post-1.0.0 Versioning
Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:
---
- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
The public API that this applies to is the CLI associated with Toolbox, the
interactions with official SDKs, and the definitions in the `tools.yaml` file.
## Contributing
Contributions are welcome. Please, see the [CONTRIBUTING](CONTRIBUTING.md) guide to get started.
Contributions are welcome. Please see the [CONTRIBUTING](CONTRIBUTING.md)
guide to get started.
For technical details on setting up an environment for developing on Toolbox itself, see the [DEVELOPER](DEVELOPER.md) guide.
Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Contributor Code of Conduct](CODE_OF_CONDUCT.md) for more information.
---
Please note that this project is released with a Contributor Code of Conduct.
By participating in this project you agree to abide by its terms. See
[Contributor Code of Conduct](CODE_OF_CONDUCT.md) for more information.
## Community
Join our [Discord community](https://discord.gg/GQrFB3Ec3W) to connect with our developers!
Join our [Discord community](https://discord.gg/GQrFB3Ec3W) to connect with our developers!

View File

@@ -1,6 +0,0 @@
# Security Policy
To report a security issue, please email toolbox-security@google.com with a
description of the issue, the steps you took to create the issue, affected
versions, and, if known, mitigations for the issue. Our vulnerability management
team will respond within 3 working days of your email.

View File

@@ -1,112 +0,0 @@
# Upgrading to MCP Toolbox for Databases v1.0.0
Welcome to the v1.0.0 release of the MCP Toolbox for Databases!
This release stabilizes our core APIs and standardizes our protocol alignments.
As part of this milestone, we have introduced several breaking changes and
deprecations that require updates to your configuration and code.
**📖 New Versioning Policy**
We have officially published our [Versioning Policy](https://googleapis.github.io/genai-toolbox/dev/about/versioning/). Moving forward, we follow standard versioning conventions to classify updates:
* **Major (vX.0.0):** Breaking changes requiring manual updates.
* **Minor (v1.X.0):** New, backward-compatible features and deprecation notices.
* **Patch (v1.0.X):** Backward-compatible bug fixes and security patches.
This guide outlines what has changed and the steps you need to take to upgrade.
## 🚨 Breaking Changes (Action Required)
### 1. Endpoint Transition: `/api` disabled by default
The legacy `/api` endpoint for the native Toolbox protocol is now disabled by default. All official SDKs have been updated to use the `/mcp` endpoint, which aligns with the standard Model Context Protocol (MCP) specification.
If you still require the legacy `/api` endpoint, you must explicitly activate it using a new command-line flag.
* **Usage:** `./toolbox --enable-api`
* **Migration:** You must update all custom implementations to use the `/mcp`
endpoint exclusively, as the `/api` endpoint is now deprecated. If your workflow
relied on a non-standard feature that is missing from the new implementation, please submit a
feature request on our [GitHub Issues page](https://github.com/googleapis/genai-toolbox/issues).
### 2. Strict Tool Naming Validation (SEP986)
Tool names are now strictly validated against [ModelContextProtocol SEP986 guidelines](https://github.com/alexhancock/modelcontextprotocol/blob/main/docs/specification/draft/server/tools.mdx#tool-names) prior to MCP initialization.
* **Migration:** Ensure all your tool names **only** contain alphanumeric characters, hyphens (`-`), underscores (`_`), and periods (`.`). Any other special characters will cause initialization to fail.
### 3. Removed CLI Flags
The legacy snake_case flag `--tools_file` has been completely removed.
* **Migration:** Update your deployment scripts to use `--config` instead.
### 4. Singular `kind` Values in Configuration
_(This step applies only if you are currently using the new flat format.)_
All primitive kind fields in configuration files have been updated to use singular nouns instead of plural. For example, `kind: sources` is now `kind: source`, and `kind: tools` is now `kind: tool`.
* **Migration:** Update your configuration files to use the singular form for all `kind`
values. _(Note: If you transitioned to the flat format using the `./toolbox migrate` command, this step was handled automatically.)_
### 5. Configuration Schema: `authSources` renamed
The `authSources` field is no longer supported in configuration files.
* **Migration:** Rename all instances of `authSources` to `authService` in your
configuration files.
### 6. CloudSQL for SQL Server: `ipAddress` removed
The `ipAddress` field for the CloudSQL for SQL Server source was redundant and has been removed.
* **Migration:** Remove the `ipAddress` field from your CloudSQL for SQL Server configurations.
## ⚠️ Deprecations & Modernization
### 1. Flat Configuration Format Introduced
We have introduced a new, streamlined "flat" format for configuration files. While the older nested format is still supported for now, **all new features will only be added to the flat format.**
**Schema Restructuring (`kind` vs. `type`):**
Along with the flat format, the configuration schema has been reorganized. The
old `kind` field (which specified the specific primitive types, like
`alloydb-postgres`) has been renamed to `type`. The `kind` field is now strictly
used to declare the core primitive of the block (e.g., `source` or `tool`).
**Example of the new flat format:**
```yaml
kind: source
name: my-source
type: alloydb-postgres
project: my-project
region: my-region
instance: my-instance
---
kind: tool
name: my-simple-tool
type: postgres-execute-sql
source: my-source
description: this is a tool that executes the sql provided.
```
**Migration:**
You can automatically migrate your existing nested configurations to the new flat format using the CLI. Run the following command:
```Bash
./toolbox migrate --config <path-to-your-config>
```
_Note: You can also use the `--configs` or `--config-folder` flags with this command._
### 2. Deprecated CLI Flags
The following CLI flags are deprecated and will be removed in a future release. Please update your scripts:
* `--tools-file` ➡️ Use `--config`
* `--tools-files` ➡️ Use `--configs`
* `--tools-folder` ➡️ Use `--config-folder`
## 💡 Other Notable Updates
* **Enhanced Error Handling:** Errors are now strictly categorized between Agent Errors (allowing the LLM to self-correct) and Client/Server Errors (which signal a hard stop).
* **Telemetry Updates:** The /mcp endpoint telemetry has been revised to fully comply with the [OpenTelemetry semantic conventions for MCP](https://opentelemetry.io/docs/specs/semconv/gen-ai/mcp/).
* **MCP Authorization Support:** The Model Context Protocol's [authorization specification](https://modelcontextprotocol.io/specification/2025-11-25/basic/authorization) is now fully supported.
* **Database Name Validation:** Removed the "required field" validation for the database name in CloudSQL for MySQL and generic MySQL sources.
* **Prebuilt Tools:** Toolsets have been resized for better performance.
## 📚 Documentation Moved
Our official documentation has a new home! Please update your bookmarks to [mcp-toolbox.dev](http://mcp-toolbox.dev).

View File

@@ -1,386 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"regexp"
"slices"
"strings"
"github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/auth/generic"
"github.com/googleapis/genai-toolbox/internal/server"
)
// Config is the top-level representation of a parsed tools configuration
// file, grouping every resource section the server understands. The yaml
// tags correspond to the v1 ("nested") configuration section names.
type Config struct {
	Sources         server.SourceConfigs         `yaml:"sources"`
	AuthServices    server.AuthServiceConfigs    `yaml:"authServices"`
	EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
	Tools           server.ToolConfigs           `yaml:"tools"`
	Toolsets        server.ToolsetConfigs        `yaml:"toolsets"`
	Prompts         server.PromptConfigs         `yaml:"prompts"`
}
// ConfigParser parses raw YAML configuration bytes into a Config.
// EnvVars records every environment variable substituted during parsing
// (including values taken from declared defaults), keyed by variable name.
type ConfigParser struct {
	EnvVars map[string]string
}
// envVarPattern matches ${ENV_NAME} and ${ENV_NAME:default_value}
// references. Compiled once at package scope so repeated config parses
// do not recompile it on every call.
var envVarPattern = regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)

// parseEnv replaces environment variables ${ENV_NAME} with their values.
// also support ${ENV_NAME:default_value}.
//
// The process environment takes precedence over a declared default.
// Every substituted value is recorded in p.EnvVars. If a referenced
// variable is unset and has no default, the match is replaced with the
// empty string and an error is returned; when several variables are
// missing, only the last one encountered is reported.
func (p *ConfigParser) parseEnv(input string) (string, error) {
	if p.EnvVars == nil {
		p.EnvVars = make(map[string]string)
	}
	var err error
	output := envVarPattern.ReplaceAllStringFunc(input, func(match string) string {
		parts := envVarPattern.FindStringSubmatch(match)
		// parts[1] is the variable name; parts[2] is ":default" (with the
		// leading colon) when a default was supplied; parts[3] is the
		// default value itself.
		variableName := parts[1]
		if value, found := os.LookupEnv(variableName); found {
			p.EnvVars[variableName] = value
			return value
		}
		if len(parts) >= 4 && parts[2] != "" {
			value := parts[3]
			p.EnvVars[variableName] = value
			return value
		}
		err = fmt.Errorf("environment variable not found: %q", variableName)
		return ""
	})
	return output, err
}
// ParseConfig parses the provided yaml into appropriate configs.
//
// Parsing runs in three stages: environment-variable substitution
// (${VAR} / ${VAR:default}), conversion of v1 "nested" documents into
// the flat v2 format, and unmarshalling into the typed resource
// configs. On any failure the zero-value Config is returned along with
// a wrapped error describing the failing stage.
func (p *ConfigParser) ParseConfig(ctx context.Context, raw []byte) (Config, error) {
	var config Config
	// Replace environment variables if found
	output, err := p.parseEnv(string(raw))
	if err != nil {
		return config, fmt.Errorf("error parsing environment variables: %s", err)
	}
	raw = []byte(output)
	// Normalize legacy v1 documents to the flat v2 layout before decoding.
	raw, err = ConvertConfig(raw)
	if err != nil {
		return config, fmt.Errorf("error converting config file: %s", err)
	}
	// Parse contents
	config.Sources, config.AuthServices, config.EmbeddingModels, config.Tools, config.Toolsets, config.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
	if err != nil {
		return config, err
	}
	return config, nil
}
// ConvertConfig converts configuration file to flat format.
//
// Each YAML document in raw is inspected in turn: documents whose
// top-level keys are v1 section names (sources, tools, ...) are split
// into one flat v2 document per entry; anything else is assumed to
// already be v2 and is re-encoded unchanged. The result is the
// concatenated multi-document YAML stream.
func ConvertConfig(raw []byte) ([]byte, error) {
	var input yaml.MapSlice
	// UseOrderedMap preserves the original key order of each document.
	decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
	// convert to config file v2
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)
	// Top-level section keys identifying a v1 ("nested") document.
	// NOTE(review): "authSources" appears in the switch below but is not
	// listed here, so a document keyed "authSources" falls into the v2
	// branch and passes through unconverted — confirm this is intentional.
	v1keys := []string{"sources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
	for {
		if err := decoder.Decode(&input); err != nil {
			if err == io.EOF {
				// End of the YAML stream.
				break
			}
			return nil, err
		}
		for _, item := range input {
			key, ok := item.Key.(string)
			if !ok {
				return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
			}
			// check if the key is config file v1's key
			if slices.Contains(v1keys, key) {
				// check if value conversion to yaml.MapSlice successfully
				// fields such as "tools" in toolsets might pass the first check but
				// fail to convert to MapSlice
				if slice, ok := item.Value.(yaml.MapSlice); ok {
					// Map the plural v1 section name to the singular v2 kind.
					// Deprecated: convert authSources to authServices
					switch key {
					case "authSources", "authServices":
						key = "authService"
					case "sources":
						key = "source"
					case "embeddingModels":
						key = "embeddingModel"
					case "tools":
						key = "tool"
					case "toolsets":
						key = "toolset"
					case "prompts":
						key = "prompt"
					}
					transformed, err := transformDocs(key, slice)
					if err != nil {
						return nil, err
					}
					// encode per-doc
					for _, doc := range transformed {
						if err := encoder.Encode(doc); err != nil {
							return nil, err
						}
					}
				} else {
					// invalid input will be ignored
					// we don't want to throw error here since the config could
					// be valid but with a different order such as:
					// ---
					// tools:
					// - tool_a
					// kind: toolset
					// ---
					continue
				}
			} else {
				// this doc is already v2, encode to buf
				// NOTE(review): this encodes the whole document and stops
				// scanning its remaining keys, so a document mixing v1 and
				// v2 top-level keys could emit earlier v1 sections twice —
				// confirm mixed documents are not expected.
				if err := encoder.Encode(input); err != nil {
					return nil, err
				}
				break
			}
		}
	}
	return buf.Bytes(), nil
}
// transformDocs converts one v1 top-level section into a list of v2
// documents. Every entry under the section becomes its own document
// beginning with "kind" and "name" keys, followed by the entry's body.
// yaml.MapSlice is used throughout to preserve key ordering.
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
	var docs []yaml.MapSlice
	for _, entry := range input {
		name, ok := entry.Key.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
		}
		// Rewrite the body ("kind" -> "type", toolset list wrapping).
		body := processValue(entry.Value, kind == "toolset")
		doc := yaml.MapSlice{
			{Key: "kind", Value: kind},
			{Key: "name", Value: name},
		}
		bodySlice, ok := body.(yaml.MapSlice)
		if !ok {
			return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
		}
		docs = append(docs, append(doc, bodySlice...))
	}
	return docs, nil
}
// processValue recursively rewrites a decoded YAML value for the v2
// format: every "kind" key inside a mapping is renamed to "type", and a
// toolset's top-level list of tool names is wrapped under a "tools" key.
func processValue(v any, isToolset bool) any {
	switch value := v.(type) {
	case yaml.MapSlice:
		// Build a fresh MapSlice so recursion never mutates the input.
		out := make(yaml.MapSlice, len(value))
		for i := range value {
			item := value[i]
			if item.Key == "kind" {
				item.Key = "type"
			}
			// Recurse into nested mappings and lists.
			item.Value = processValue(item.Value, false)
			out[i] = item
		}
		return out
	case []any:
		if isToolset {
			// A toolset body is a bare list of tool names; wrap it.
			return yaml.MapSlice{{Key: "tools", Value: value}}
		}
		rewritten := make([]any, len(value))
		for i, elem := range value {
			rewritten[i] = processValue(elem, false)
		}
		return rewritten
	default:
		// Scalars pass through untouched.
		return value
	}
}
// mergeNamed merges the entries of src into dst. A name already present
// in dst is a conflict unless allowIdentical is true and the two values
// compare deep-equal (used for sources, which may legitimately be
// repeated verbatim across files). Conflict descriptions are appended to
// *conflicts; fileIndex is the zero-based position of the file being
// merged, reported one-based.
func mergeNamed[M ~map[K]V, K comparable, V any](dst, src M, label string, fileIndex int, allowIdentical bool, conflicts *[]string) {
	for name, value := range src {
		existing, exists := dst[name]
		switch {
		case !exists:
			dst[name] = value
		case allowIdentical && cmp.Equal(existing, value):
			// Identical duplicate across files — keep the copy already merged.
		default:
			*conflicts = append(*conflicts, fmt.Sprintf("%s '%s' (file #%d)", label, name, fileIndex+1))
		}
	}
}

// mergeConfigs merges multiple Config structs into one.
// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets.
// All resource names (sources, authServices, tools, toolsets) must be unique across all files,
// except that an identical source definition may appear in several files.
// It also enforces that at most one authService enables MCP authorization.
func mergeConfigs(files ...Config) (Config, error) {
	merged := Config{
		Sources:         make(server.SourceConfigs),
		AuthServices:    make(server.AuthServiceConfigs),
		EmbeddingModels: make(server.EmbeddingModelConfigs),
		Tools:           make(server.ToolConfigs),
		Toolsets:        make(server.ToolsetConfigs),
		Prompts:         make(server.PromptConfigs),
	}
	var conflicts []string
	for fileIndex, file := range files {
		// Sources tolerate byte-identical duplicates; everything else must
		// be uniquely named across all files.
		mergeNamed(merged.Sources, file.Sources, "source", fileIndex, true, &conflicts)
		mergeNamed(merged.AuthServices, file.AuthServices, "authService", fileIndex, false, &conflicts)
		mergeNamed(merged.EmbeddingModels, file.EmbeddingModels, "embedding model", fileIndex, false, &conflicts)
		mergeNamed(merged.Tools, file.Tools, "tool", fileIndex, false, &conflicts)
		mergeNamed(merged.Toolsets, file.Toolsets, "toolset", fileIndex, false, &conflicts)
		mergeNamed(merged.Prompts, file.Prompts, "prompt", fileIndex, false, &conflicts)
	}
	// If conflicts were detected, return an error
	if len(conflicts) > 0 {
		return Config{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
	}
	// Ensure only one authService has mcpEnabled = true
	var mcpEnabledAuthServers []string
	for name, authService := range merged.AuthServices {
		// Only generic type has McpEnabled right now
		if genericService, ok := authService.(generic.Config); ok && genericService.McpEnabled {
			mcpEnabledAuthServers = append(mcpEnabledAuthServers, name)
		}
	}
	if len(mcpEnabledAuthServers) > 1 {
		return Config{}, fmt.Errorf("multiple authServices with mcpEnabled=true detected: %s. Only one MCP authorization server is currently supported", strings.Join(mcpEnabledAuthServers, ", "))
	}
	return merged, nil
}
// LoadAndMergeConfigs reads every file in filePaths, parses each one into
// a Config, and combines the results into a single Config. A read or
// parse failure aborts immediately with an error naming the offending
// file; an empty file list is an error. With exactly one file the parsed
// config is returned as-is, otherwise the configs are merged with
// conflict detection.
func (p *ConfigParser) LoadAndMergeConfigs(ctx context.Context, filePaths []string) (Config, error) {
	parsed := make([]Config, 0, len(filePaths))
	for _, path := range filePaths {
		contents, readErr := os.ReadFile(path)
		if readErr != nil {
			return Config{}, fmt.Errorf("unable to read config file at %q: %w", path, readErr)
		}
		cfg, parseErr := p.ParseConfig(ctx, contents)
		if parseErr != nil {
			return Config{}, fmt.Errorf("unable to parse config file at %q: %w", path, parseErr)
		}
		parsed = append(parsed, cfg)
	}
	switch len(parsed) {
	case 0:
		return Config{}, fmt.Errorf("no YAML files found")
	case 1:
		// Single file: nothing to merge.
		return parsed[0], nil
	}
	merged, mergeErr := mergeConfigs(parsed...)
	if mergeErr != nil {
		return Config{}, fmt.Errorf("unable to merge config files: %w", mergeErr)
	}
	return merged, nil
}
// GetPathsFromConfigFolder loads all YAML files from a directory and merges them
func GetPathsFromConfigFolder(ctx context.Context, folderPath string) ([]string, error) {
// Check if directory exists
info, err := os.Stat(folderPath)
if err != nil {
return nil, fmt.Errorf("unable to access config folder at %q: %w", folderPath, err)
}
if !info.IsDir() {
return nil, fmt.Errorf("path %q is not a directory", folderPath)
}
// Find all YAML files in the directory
pattern := filepath.Join(folderPath, "*.yaml")
yamlFiles, err := filepath.Glob(pattern)
if err != nil {
return nil, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
}
// Also find .yml files
ymlPattern := filepath.Join(folderPath, "*.yml")
ymlFiles, err := filepath.Glob(ymlPattern)
if err != nil {
return nil, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
}
// Combine both file lists
allFiles := append(yamlFiles, ymlFiles...)
return allFiles, nil
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,71 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"fmt"
"strings"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
// PersistentFlags sets up flags that are available for all commands and
// subcommands
// It is also used to set up persistent flags during subcommand unit tests
func PersistentFlags(parentCmd *cobra.Command, opts *ToolboxOptions) {
	persistentFlags := parentCmd.PersistentFlags()
	// Logging controls.
	persistentFlags.Var(&opts.Cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
	persistentFlags.Var(&opts.Cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
	// Telemetry export configuration (GCP direct export or generic OTLP).
	persistentFlags.BoolVar(&opts.Cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
	persistentFlags.StringVar(&opts.Cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
	persistentFlags.StringVar(&opts.Cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
	// Extra key/value metadata appended to the outgoing User-Agent header.
	persistentFlags.StringSliceVar(&opts.Cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
}
// ConfigFileFlags defines flags related to the configuration file.
// It should be applied to any command that requires configuration loading.
//
// Each deprecated tools-* flag is registered against the same destination
// field as its replacement, so whichever spelling the user supplies
// populates the same option; the deprecated spelling additionally prints
// a deprecation warning when used.
func ConfigFileFlags(flags *pflag.FlagSet, opts *ToolboxOptions) {
	// --config and its deprecated alias --tools-file both write opts.Config.
	flags.StringVar(&opts.Config, "config", "", "File path specifying the tool configuration. Cannot be used with --configs, or --config-folder.")
	flags.StringVar(&opts.Config, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
	_ = flags.MarkDeprecated("tools-file", "please use --config instead") // DEPRECATED
	// --configs and its deprecated alias --tools-files both write opts.Configs.
	flags.StringSliceVar(&opts.Configs, "configs", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --config, or --config-folder.")
	flags.StringSliceVar(&opts.Configs, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
	_ = flags.MarkDeprecated("tools-files", "please use --configs instead") // DEPRECATED
	// --config-folder and its deprecated alias --tools-folder both write opts.ConfigFolder.
	flags.StringVar(&opts.ConfigFolder, "config-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --config, or --configs.")
	flags.StringVar(&opts.ConfigFolder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
	_ = flags.MarkDeprecated("tools-folder", "please use --config-folder instead") // DEPRECATED
	// Fetch prebuilt tools sources to customize the help description
	prebuiltHelp := fmt.Sprintf(
		"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
		strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
	)
	flags.StringSliceVar(&opts.PrebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
}
// ServeFlags defines flags for starting and configuring the server.
func ServeFlags(flags *pflag.FlagSet, opts *ToolboxOptions) {
	// Network binding.
	flags.StringVarP(&opts.Cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
	flags.IntVarP(&opts.Cfg.Port, "port", "p", 5000, "Port the server will listen on.")
	// Transport and feature toggles.
	flags.BoolVar(&opts.Cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
	flags.BoolVar(&opts.Cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
	flags.BoolVar(&opts.Cfg.EnableAPI, "enable-api", false, "Enable the /api endpoint.")
	// MCP authorization metadata (Protected Resource Metadata).
	flags.StringVar(&opts.Cfg.ToolboxUrl, "toolbox-url", "", "Specifies the Toolbox URL. Used as the resource field in the MCP PRM file when MCP Auth is enabled. Falls back to TOOLBOX_URL environment variable.")
	flags.StringVar(&opts.Cfg.McpPrmFile, "mcp-prm-file", "", "Path to a manual Protected Resource Metadata (PRM) JSON file. If provided, overrides auto-generation.")
	// Cross-origin and host allow-lists; both default to allow-all.
	flags.StringSliceVar(&opts.Cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
	flags.StringSliceVar(&opts.Cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
}

View File

@@ -1,273 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
// Import prompt packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
// Import tool packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupcontext"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataproc/dataprocgetcluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataproc/dataprocgetjob"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataproc/dataproclistclusters"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataproc/dataproclistjobs"
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectdirectory"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateviewfromtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectdirectory"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlookmltests"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectdirectories"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergitbranch"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlookmltests"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetsession"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetsessiontemplate"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistsessions"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/cockroachdb"
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
_ "github.com/googleapis/genai-toolbox/internal/sources/dataproc"
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch"
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
_ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb"
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/oracle"
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
)

View File

@@ -1,143 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package invoke
import (
"context"
"encoding/json"
"fmt"
"strings"
"github.com/googleapis/genai-toolbox/cmd/internal"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/server/resources"
"github.com/googleapis/genai-toolbox/internal/util"
"github.com/googleapis/genai-toolbox/internal/util/parameters"
"github.com/spf13/cobra"
)
// NewCommand constructs the "invoke" subcommand, which executes a single tool
// by name with an optional JSON parameter object and prints its result.
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
	invokeCmd := &cobra.Command{
		Use:   "invoke <tool-name> [params]",
		Short: "Execute a tool directly",
		Long: `Execute a tool directly with parameters.
Params must be a JSON string.
Example:
toolbox invoke my-tool '{"param1": "value1"}'`,
		Args: cobra.MinimumNArgs(1),
		RunE: func(c *cobra.Command, args []string) error {
			return runInvoke(c, args, opts)
		},
	}
	// Register the shared configuration-file flags on this subcommand.
	internal.ConfigFileFlags(invokeCmd.Flags(), opts)
	return invokeCmd
}
// runInvoke loads the configuration, initializes the configured resources,
// and executes the named tool once, printing the result as indented JSON on
// the command's output stream.
//
// args[0] is the tool name; args[1], when present, must be a JSON object of
// parameter values. Client-side authorization is not supported for this
// one-shot CLI invocation.
func runInvoke(cmd *cobra.Command, args []string, opts *internal.ToolboxOptions) error {
	ctx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	ctx, shutdown, err := opts.Setup(ctx)
	if err != nil {
		return err
	}
	defer func() {
		// Best-effort teardown of whatever Setup initialized.
		_ = shutdown(ctx)
	}()

	if _, err = opts.LoadConfig(ctx, &internal.ConfigParser{}); err != nil {
		return err
	}

	// logErr collapses the repeated "build error, log it, return it" pattern
	// used throughout this function. %w verbs wrap as with fmt.Errorf.
	logErr := func(format string, a ...any) error {
		errMsg := fmt.Errorf(format, a...)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	// Initialize Resources
	sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
	if err != nil {
		return logErr("failed to initialize resources: %w", err)
	}
	resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)

	// Look up the requested tool by name.
	toolName := args[0]
	tool, ok := resourceMgr.GetTool(toolName)
	if !ok {
		return logErr("tool %q not found", toolName)
	}

	// Decode the optional JSON parameter object from the command line.
	var paramsInput string
	if len(args) > 1 {
		paramsInput = args[1]
	}
	params := make(map[string]any)
	if paramsInput != "" {
		if err := util.DecodeJSON(strings.NewReader(paramsInput), &params); err != nil {
			return logErr("params must be a valid JSON string: %w", err)
		}
	}
	parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil)
	if err != nil {
		return logErr("invalid parameters: %w", err)
	}
	parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap())
	if err != nil {
		return logErr("error embedding parameters: %w", err)
	}

	// Client Auth not supported for ephemeral CLI call
	requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr)
	if err != nil {
		return logErr("failed to check auth requirements: %w", err)
	}
	if requiresAuth {
		return logErr("client authorization is not supported")
	}

	result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "")
	if err != nil {
		return logErr("tool execution failed: %w", err)
	}

	// Print Result
	output, err := json.MarshalIndent(result, "", " ")
	if err != nil {
		return logErr("failed to marshal result: %w", err)
	}
	fmt.Fprintln(opts.IOStreams.Out, string(output))
	return nil
}

View File

@@ -1,167 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package invoke
import (
"bytes"
"os"
"path/filepath"
"strings"
"testing"
"github.com/googleapis/genai-toolbox/cmd/internal"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
"github.com/spf13/cobra"
)
func invokeCommand(args []string) (string, error) {
parentCmd := &cobra.Command{Use: "toolbox"}
buf := new(bytes.Buffer)
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
internal.PersistentFlags(parentCmd, opts)
cmd := NewCommand(opts)
parentCmd.AddCommand(cmd)
parentCmd.SetArgs(args)
err := parentCmd.Execute()
return buf.String(), err
}
// TestInvokeTool exercises the "invoke" subcommand end to end against a
// temporary SQLite-backed tools file: a plain invocation, string and integer
// parameters, an unknown tool name, and malformed JSON parameters.
func TestInvokeTool(t *testing.T) {
	// Create a temporary config. The SQLite database file is also placed in
	// the temp dir so the test does not leave test.db in the working
	// directory.
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "test.db")
	toolsFileContent := `
sources:
  my-sqlite:
    kind: sqlite
    database: ` + dbPath + `
tools:
  hello-sqlite:
    kind: sqlite-sql
    source: my-sqlite
    description: "hello tool"
    statement: "SELECT 'hello' as greeting"
  echo-tool:
    kind: sqlite-sql
    source: my-sqlite
    description: "echo tool"
    statement: "SELECT ? as msg"
    parameters:
      - name: message
        type: string
        description: message to echo
  int-tool:
    kind: sqlite-sql
    source: my-sqlite
    description: "int tool"
    statement: "SELECT ? as val"
    parameters:
      - name: value
        type: integer
        description: int value
`
	toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
	if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
		t.Fatalf("failed to write config: %v", err)
	}
	tcs := []struct {
		desc    string
		args    []string
		want    string
		wantErr bool
		errStr  string
	}{
		{
			desc: "success - basic tool call",
			args: []string{"invoke", "hello-sqlite", "--config", toolsFilePath},
			want: `"greeting": "hello"`,
		},
		{
			desc: "success - tool call with parameters",
			args: []string{"invoke", "echo-tool", `{"message": "world"}`, "--config", toolsFilePath},
			want: `"msg": "world"`,
		},
		{
			// Was "--tools-file"; aligned with the "--config" flag used by
			// every other case.
			desc: "success - tool call with integer parameters",
			args: []string{"invoke", "int-tool", `{"value": 42}`, "--config", toolsFilePath},
			want: `"val": 42`,
		},
		{
			desc:    "error - tool not found",
			args:    []string{"invoke", "non-existent", "--config", toolsFilePath},
			wantErr: true,
			errStr:  `tool "non-existent" not found`,
		},
		{
			desc:    "error - invalid JSON params",
			args:    []string{"invoke", "echo-tool", `invalid-json`, "--config", toolsFilePath},
			wantErr: true,
			errStr:  `params must be a valid JSON string`,
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got, err := invokeCommand(tc.args)
			if (err != nil) != tc.wantErr {
				t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
			}
			if tc.wantErr && !strings.Contains(err.Error(), tc.errStr) {
				t.Fatalf("got error %v, want error containing %q", err, tc.errStr)
			}
			if !tc.wantErr && !strings.Contains(got, tc.want) {
				t.Fatalf("got %q, want it to contain %q", got, tc.want)
			}
		})
	}
}
// TestInvokeTool_AuthUnsupported verifies that invoking a tool whose source
// requires client-side OAuth fails with the dedicated unsupported-auth error.
func TestInvokeTool_AuthUnsupported(t *testing.T) {
	cfg := `
sources:
  my-bq:
    kind: bigquery
    project: my-project
    useClientOAuth: true
tools:
  bq-tool:
    kind: bigquery-sql
    source: my-bq
    description: "bq tool"
    statement: "SELECT 1"
`
	cfgPath := filepath.Join(t.TempDir(), "auth_tools.yaml")
	if err := os.WriteFile(cfgPath, []byte(cfg), 0644); err != nil {
		t.Fatalf("failed to write config: %v", err)
	}

	_, err := invokeCommand([]string{"invoke", "bq-tool", "--config", cfgPath})
	if err == nil {
		t.Fatal("expected error for tool requiring client auth, but got nil")
	}
	if !strings.Contains(err.Error(), "client authorization is not supported") {
		t.Fatalf("unexpected error message: %v", err)
	}
}

Some files were not shown because too many files have changed in this diff. Show More