Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-02-18 11:02:26 -05:00)

Compare commits: lsc-177139 ... antigravit — 78 commits
Commits (SHA1):
89c85729d1, 93f638650d, 8752e05ab6, 185c72939d, 87ef9c5908, 1deda6cb4c, 768bf01966, b0fe6775d5,
a539c71ffe, 975d02e243, 624d06e7ba, 9f27134918, baf1bd1a97, 9b67f7354c, 60470c32dd, 89277513f0,
ee10723480, eee77ba333, 6b0ffaa309, b935193bea, dabfad1e1c, 58d31a9f51, 871514d635, 18017d6545,
e81dfeaff5, 02a11af350, 8a497a8227, 0e269a1d12, b8af73fa4e, 8f13621e02, e047b35e1e, dccd14cd74,
8487c8d4cf, 504391b60d, 88caf7344e, 5abad5d56c, eddf1a3ea2, 2bff1384a3, e1c4700428, 3c29cade24,
50ce52d2fa, 446d62acd9, 735cb760ea, e5e9fb7f94, 6b326ac2f3, 62359deae5, 3ff8f533cf, c858c49fd3,
5156db2621, 7c67bcc810, f6804420b9, 927881ffb9, 42c8dd7ddd, ae0c29254a, 46b072c3f4, 4aabb4aaca,
897c63dcea, 22b5aca395, 57f6220b9e, c451015509, ef63860559, a89191d8bb, 13a682f407, dc7c62c951,
aec8897805, a4c9287aec, 2c228ef4f2, 1e9c4762a5, 7e6e88a21f, b2ea4b7b8f, cfd4b18dee, d2576cbc38,
cd56ea44fb, 12bdd95459, 61739300be, 3b140f5006, 84e826a93e, edd739c490
```
@@ -33,19 +33,56 @@ steps:
script: |
go get -d ./...

- id: "install-zig"
name: golang:1
waitFor: ['-']
volumes:
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
tar -xf zig.tar.xz -C /zig-tools --strip-components=1

- id: "install-macos-sdk"
name: golang:1
waitFor: ['-']
volumes:
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
echo "Downloading macOS 14.5 SDK..."
curl -fL -o sdk.tar.xz https://github.com/alexey-lysiuk/macos-sdk/releases/download/14.5/MacOSX14.5.tar.xz

mkdir -p /macos-sdk/MacOSX14.5.sdk
echo "Unpacking macOS 14.5 SDK..."
tar -xf sdk.tar.xz -C /macos-sdk/MacOSX14.5.sdk --strip-components=1
```
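The `install-zig` and `install-macos-sdk` steps stage a Zig cross-toolchain and a macOS SDK on shared volumes so later build steps can cross-compile cgo code from the Linux builders. A minimal local sketch of the same idea, assuming only that you are building a Go module in the current directory (paths and the Zig version come from the steps above):

```bash
#!/usr/bin/env bash
set -e

# Fetch the same Zig release the "install-zig" step downloads.
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
mkdir -p "$HOME/zig-tools"
tar -xf zig.tar.xz -C "$HOME/zig-tools" --strip-components=1

# Use `zig cc` as the cgo cross-compiler for a linux/amd64 target.
export CGO_ENABLED=1 GOOS=linux GOARCH=amd64
export CC="$HOME/zig-tools/zig cc -target x86_64-linux-gnu"
export CXX="$HOME/zig-tools/zig c++ -target x86_64-linux-gnu"
go build -o toolbox.linux.amd64 .
```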
- id: "build-linux-amd64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=linux'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
|
||||
|
||||
- id: "store-linux-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -57,33 +94,53 @@ steps:
|
||||
|
||||
- id: "build-linux-amd64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=linux'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64
|
||||
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64
|
||||
|
||||
- id: "build-darwin-arm64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=arm64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
|
||||
|
||||
- id: "store-darwin-arm64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -95,32 +152,59 @@ steps:
|
||||
|
||||
- id: "build-darwin-arm64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=arm64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
|
||||
|
||||
- id: "build-darwin-amd64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=amd64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -132,32 +216,52 @@ steps:
|
||||
|
||||
- id: "build-darwin-amd64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=amd64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
|
||||
|
||||
- id: "build-windows-amd64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=windows'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -169,18 +273,25 @@ steps:
|
||||
|
||||
- id: "build-windows-amd64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=windows'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
|
||||
|
||||
options:
|
||||
automapSubstitutions: true
|
||||
|
||||
```
@@ -34,9 +34,47 @@ steps:
path: "/gopath"
script: |
go test -c -race -cover \
-coverpkg=./internal/sources/...,./internal/tools/... ./tests/...
-coverpkg=./internal/sources/...,./internal/tools/... \
$(go list ./tests/... | grep -v '/tests/prompts')
chmod +x .ci/test_with_coverage.sh

- id: "compile-prompt-test-binary"
name: golang:1
waitFor: ["install-dependencies"]
env:
- "GOPATH=/gopath"
volumes:
- name: "go"
path: "/gopath"
script: |
for dir in ./tests/prompts/*; do
  if [ -d "$dir" ]; then
    PROMPT_TYPE=$(basename "$dir")
    echo "--- Compiling prompt test for ${PROMPT_TYPE} with targeted coverage ---"

    go test -c -race -cover \
      -coverpkg=./internal/prompts/... \
      -o "prompt.${PROMPT_TYPE}.test" \
      "${dir}"
  fi
done

chmod +x .ci/test_prompts_with_coverage.sh
```
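Outside Cloud Build, the same compile-then-run flow for a single prompt suite looks roughly like this (the `custom` directory name and flags are taken from the steps above and from the coverage script further down):

```bash
#!/usr/bin/env bash
set -e

# Compile the prompt integration tests for one suite into a standalone binary.
go test -c -race -cover \
  -coverpkg=./internal/prompts/... \
  -o prompt.custom.test \
  ./tests/prompts/custom

# Run it with a coverage profile, as .ci/test_prompts_with_coverage.sh does later.
./prompt.custom.test -test.v -test.coverprofile=coverage.prompts-custom.out
go tool cover -func=coverage.prompts-custom.out | grep "total:"
```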
- id: "prompts-custom"
|
||||
name: golang:1
|
||||
waitFor: ["compile-prompt-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_prompts_with_coverage.sh "custom"
|
||||
|
||||
- id: "cloud-sql-pg"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
|
||||
```
@@ -77,8 +77,10 @@ run_orch_test() {
setup_orch_table

cd "$orch_dir"
if [ -f "package.json" ]; then
  echo "Installing dependencies for $orch_name..."
  if [ -f "package-lock.json" ]; then
    npm ci
  else
    npm install
  fi

@@ -120,4 +122,4 @@ for ORCH_DIR in "$QUICKSTART_JS_DIR"/*/; do
    continue
  fi
  run_orch_test "$ORCH_DIR"
done
```
.ci/test_prompts_with_coverage.sh (new file, 75 lines)

```bash
@@ -0,0 +1,75 @@
#!/bin/bash
# .ci/test_prompts_with_coverage.sh
#
# This script runs a specific prompt integration test, calculates its
# code coverage, and checks if it meets a minimum threshold.
#
# It is called with one argument: the type of the prompt.
# Example usage: .ci/test_prompts_with_coverage.sh "custom"

# Exit immediately if a command fails.
set -e

# --- 1. Define Variables ---

# The first argument is the prompt type (e.g., "custom").
PROMPT_TYPE=$1
COVERAGE_THRESHOLD=80 # Minimum coverage percentage required.

if [ -z "$PROMPT_TYPE" ]; then
  echo "Error: No prompt type provided. Please call this script with an argument."
  echo "Usage: .ci/test_prompts_with_coverage.sh <prompt_type>"
  exit 1
fi

# Construct names based on the prompt type.
TEST_BINARY="./prompt.${PROMPT_TYPE}.test"
TEST_NAME="$(tr '[:lower:]' '[:upper:]' <<< ${PROMPT_TYPE:0:1})${PROMPT_TYPE:1} Prompts"
COVERAGE_FILE="coverage.prompts-${PROMPT_TYPE}.out"


# --- 2. Run Integration Tests ---

echo "--- Running integration tests for ${TEST_NAME} ---"

# Safety check for the binary's existence.
if [ ! -f "$TEST_BINARY" ]; then
  echo "Error: Test binary not found at ${TEST_BINARY}. Aborting."
  exit 1
fi

# Execute the test binary and generate the coverage file.
# If the tests fail, the 'set -e' command will cause the script to exit here.
if ! ./"${TEST_BINARY}" -test.v -test.coverprofile="${COVERAGE_FILE}"; then
  echo "Error: Tests for ${TEST_NAME} failed. Exiting."
  exit 1
fi

echo "--- Tests for ${TEST_NAME} passed successfully ---"


# --- 3. Calculate and Check Coverage ---

echo "Calculating coverage for ${TEST_NAME}..."

# Calculate the total coverage percentage from the generated file.
# The '2>/dev/null' suppresses warnings if the coverage file is empty.
total_coverage=$(go tool cover -func="${COVERAGE_FILE}" 2>/dev/null | grep "total:" | awk '{print $3}')

if [ -z "$total_coverage" ]; then
  echo "Warning: Could not calculate coverage for ${TEST_NAME}. The coverage report might be empty."
  total_coverage="0%"
fi

echo "${TEST_NAME} total coverage: $total_coverage"

# Remove the '%' sign for numerical comparison.
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')

# Check if the coverage is below the defined threshold.
if awk -v coverage="$coverage_numeric" -v threshold="$COVERAGE_THRESHOLD" 'BEGIN {exit !(coverage < threshold)}'; then
  echo "Coverage failure: ${TEST_NAME} total coverage (${total_coverage}) is below the ${COVERAGE_THRESHOLD}% threshold."
  exit 1
else
  echo "Coverage for ${TEST_NAME} is sufficient."
fi
```
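The `grep "total:" | awk '{print $3}'` pipeline above relies on the standard `go tool cover -func` layout, where the final line reports aggregate statement coverage; the percentage in this illustrative run is made up:

```bash
$ go tool cover -func=coverage.prompts-custom.out | grep "total:"
total:        (statements)        84.3%
```

`awk '{print $3}'` extracts the third field (`84.3%`), which is then stripped of its `%` sign and compared against `COVERAGE_THRESHOLD`.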
```
@@ -29,7 +29,7 @@ steps:
- id: "install-dependencies"
name: golang:1
waitFor: ['-']
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'

@@ -37,20 +37,56 @@ steps:
script: |
go get -d ./...

- id: "install-zig"
name: golang:1
waitFor: ['-']
volumes:
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz
tar -xf zig.tar.xz -C /zig-tools --strip-components=1

- id: "install-macos-sdk"
name: golang:1
waitFor: ['-']
volumes:
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
set -e
apt-get update && apt-get install -y xz-utils
echo "Downloading macOS 14.5 SDK..."
curl -fL -o sdk.tar.xz https://github.com/alexey-lysiuk/macos-sdk/releases/download/14.5/MacOSX14.5.tar.xz

mkdir -p /macos-sdk/MacOSX14.5.sdk
echo "Unpacking macOS 14.5 SDK..."
tar -xf sdk.tar.xz -C /macos-sdk/MacOSX14.5.sdk --strip-components=1

- id: "build-linux-amd64"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64

- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"

@@ -63,18 +99,24 @@ steps:

- id: "build-linux-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=linux'
- 'GOARCH=amd64'
- 'CC=/zig-tools/zig cc -target x86_64-linux-gnu'
- 'CXX=/zig-tools/zig c++ -target x86_64-linux-gnu'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64

- id: "store-linux-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"

@@ -87,18 +129,31 @@ steps:

- id: "build-darwin-arm64"
name: golang:1
waitFor:
- "install-dependencies"
- "install-zig"
- "install-macos-sdk"
env:
- 'GOPATH=/gopath'
- 'CGO_ENABLED=1'
- 'GOOS=darwin'
- 'GOARCH=arm64'
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
- 'MACOS_MIN_VER=10.14'
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
volumes:
- name: 'go'
path: '/gopath'
- name: 'zig'
path: '/zig-tools'
- name: 'macos-sdk'
path: '/macos-sdk'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64

- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"

@@ -107,22 +162,36 @@ steps:
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
gcloud storage cp toolbox.darwin.arm64 \
  gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
```
- id: "build-darwin-arm64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=arm64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target aarch64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
|
||||
|
||||
- id: "store-darwin-arm64-geminicli"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -135,18 +204,31 @@ steps:
|
||||
|
||||
- id: "build-darwin-amd64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=amd64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -159,18 +241,31 @@ steps:
|
||||
|
||||
- id: "build-darwin-amd64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
- "install-macos-sdk"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=darwin'
|
||||
- 'GOARCH=amd64'
|
||||
- 'SDK_PATH=/macos-sdk/MacOSX14.5.sdk'
|
||||
- 'MACOS_MIN_VER=10.14'
|
||||
- 'CGO_LDFLAGS=-mmacosx-version-min=10.14 --sysroot /macos-sdk/MacOSX14.5.sdk -F/macos-sdk/MacOSX14.5.sdk/System/Library/Frameworks -L/usr/lib'
|
||||
- 'COMMON_FLAGS=-mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CC=/zig-tools/zig cc -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
- 'CXX=/zig-tools/zig c++ -mmacosx-version-min=10.14 -target x86_64-macos.11.0.0-none -isysroot /macos-sdk/MacOSX14.5.sdk -iwithsysroot /usr/include -iframeworkwithsysroot /System/Library/Frameworks'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
|
||||
go build -trimpath -buildmode=pie -ldflags "-s -w -X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
|
||||
|
||||
- id: "store-darwin-amd64-geminicli"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -179,22 +274,29 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
gcloud storage cp toolbox.geminicli.darwin.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/amd64/toolbox
|
||||
gcloud storage cp toolbox.geminicli.darwin.amd64 \
|
||||
gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/amd64/toolbox
|
||||
|
||||
- id: "build-windows-amd64"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=windows'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -207,18 +309,26 @@ steps:
|
||||
|
||||
- id: "build-windows-amd64-geminicli"
|
||||
name: golang:1
|
||||
waitFor:
|
||||
waitFor:
|
||||
- "install-dependencies"
|
||||
env:
|
||||
- "install-zig"
|
||||
env:
|
||||
- 'GOPATH=/gopath'
|
||||
- 'CGO_ENABLED=1'
|
||||
- 'GOOS=windows'
|
||||
- 'GOARCH=amd64'
|
||||
- 'CC=/zig-tools/zig cc -target x86_64-windows-gnu'
|
||||
- 'CXX=/zig-tools/zig c++ -target x86_64-windows-gnu'
|
||||
volumes:
|
||||
- name: 'go'
|
||||
path: '/gopath'
|
||||
- name: 'zig'
|
||||
path: '/zig-tools'
|
||||
- name: 'macos-sdk'
|
||||
path: '/macos-sdk'
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
|
||||
|
||||
- id: "store-windows-amd64-geminicli"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -243,4 +353,4 @@ substitutions:
|
||||
_AR_REPO_NAME: toolbox
|
||||
_BUCKET_NAME: genai-toolbox
|
||||
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
|
||||
_PUSH_LATEST: "true"
|
||||
_PUSH_LATEST: "false" # Substituted in trigger
|
||||
.github/CODEOWNERS (vendored, 11 lines changed)

```
@@ -4,3 +4,14 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

* @googleapis/senseai-eco
# Code & Tests
**/alloydb*/ @googleapis/toolbox-alloydb
**/bigquery/ @googleapis/toolbox-bigquery
**/bigtable/ @googleapis/toolbox-bigtable
**/cloudsqlmssql/ @googleapis/toolbox-cloud-sql-mssql
**/cloudsqlmysql/ @googleapis/toolbox-cloud-sql-mysql
**/cloudsqlpg/ @googleapis/toolbox-cloud-sql-postgres
**/dataplex/ @googleapis/toolbox-dataplex
**/firestore/ @googleapis/toolbox-firestore
**/looker/ @googleapis/toolbox-looker
**/spanner/ @googleapis/toolbox-spanner
```
.github/blunderbuss.yml (vendored, 85 lines changed)

```
@@ -18,18 +18,91 @@ assign_issues:
- averikitsch
- anubhav756
- twishabansal
- dishaprakash
assign_issues_by:
- labels:
  - 'product: alloydb'
  to:
  - 'googleapis/toolbox-alloydb'
- labels:
  - 'product: bigquery'
  to:
  - Genesis929
  - shobsi
  - jiaxunwu
  - 'googleapis/toolbox-bigquery'
- labels:
  - 'product: bigtable'
  to:
  - 'googleapis/toolbox-bigtable'
- labels:
  - 'product: mssql'
  to:
  - 'googleapis/toolbox-cloud-sql-mssql'
- labels:
  - 'product: mysql'
  to:
  - 'googleapis/toolbox-cloud-sql-mysql'
- labels:
  - 'product: postgres'
  to:
  - 'googleapis/toolbox-cloud-sql-postgres'
- labels:
  - 'product: dataplex'
  to:
  - 'googleapis/toolbox-dataplex'
- labels:
  - 'product: firestore'
  to:
  - 'googleapis/toolbox-firestore'
- labels:
  - 'product: looker'
  to:
  - drstrangelooker
  - 'googleapis/toolbox-looker'
- labels:
  - 'product: spanner'
  to:
  - 'googleapis/toolbox-spanner'
assign_prs:
- Yuan325
- duwenxin99
- averikitsch
assign_prs_by:
- labels:
  - 'product: alloydb'
  to:
  - 'googleapis/toolbox-alloydb'
- labels:
  - 'product: bigquery'
  to:
  - 'googleapis/toolbox-bigquery'
- labels:
  - 'product: bigtable'
  to:
  - 'googleapis/toolbox-bigtable'
- labels:
  - 'product: mssql'
  to:
  - 'googleapis/toolbox-cloud-sql-mssql'
- labels:
  - 'product: mysql'
  to:
  - 'googleapis/toolbox-cloud-sql-mysql'
- labels:
  - 'product: postgres'
  to:
  - 'googleapis/toolbox-cloud-sql-postgres'
- labels:
  - 'product: dataplex'
  to:
  - 'googleapis/toolbox-dataplex'
- labels:
  - 'product: firestore'
  to:
  - 'googleapis/toolbox-firestore'
- labels:
  - 'product: looker'
  to:
  - 'googleapis/toolbox-looker'
- labels:
  - 'product: spanner'
  to:
  - 'googleapis/toolbox-spanner'
```
.github/labels.yaml (vendored, 86 lines changed)

```
@@ -93,10 +93,90 @@
description: 'Use label to signal PR should be included in the next release.'

# Product Labels
- name: 'product: alloydb'
  color: 5065c7
  description: 'AlloyDB'
- name: 'product: bigquery'
  color: 5065c7
  description: 'Product: Assigned to the BigQuery team.'
  description: 'BigQuery'
- name: 'product: bigtable'
  color: 5065c7
  description: 'Bigtable'
- name: 'product: cassandra'
  color: 5065c7
  description: 'Cassandra'
- name: 'product: clickhouse'
  color: 5065c7
  description: 'ClickHouse'
- name: 'product: mssql'
  color: 5065c7
  description: 'SQL Server'
- name: 'product: mysql'
  color: 5065c7
  description: 'MySQL'
- name: 'product: postgres'
  color: 5065c7
  description: 'PostgreSQL'
- name: 'product: couchbase'
  color: 5065c7
  description: 'Couchbase'
- name: 'product: dataplex'
  color: 5065c7
  description: 'Dataplex'
- name: 'product: dgraph'
  color: 5065c7
  description: 'Dgraph'
- name: 'product: elasticsearch'
  color: 5065c7
  description: 'Elasticsearch'
- name: 'product: firebird'
  color: 5065c7
  description: 'Firebird'
- name: 'product: firestore'
  color: 5065c7
  description: 'Firestore'
- name: 'product: looker'
  color: 5065c7
  description: 'Product: Assigned to the Looker team.'
  description: 'Looker'
- name: 'product: mindsdb'
  color: 5065c7
  description: 'MindsDB'
- name: 'product: mongodb'
  color: 5065c7
  description: 'MongoDB'
- name: 'product: neo4j'
  color: 5065c7
  description: 'Neo4j'
- name: 'product: oceanbase'
  color: 5065c7
  description: 'OceanBase'
- name: 'product: oracle'
  color: 5065c7
  description: 'Oracle'
- name: 'product: redis'
  color: 5065c7
  description: 'Redis'
- name: 'product: serverlessspark'
  color: 5065c7
  description: 'Serverless Spark'
- name: 'product: singlestore'
  color: 5065c7
  description: 'SingleStore'
- name: 'product: spanner'
  color: 5065c7
  description: 'Spanner'
- name: 'product: sqlite'
  color: 5065c7
  description: 'SQLite'
- name: 'product: tidb'
  color: 5065c7
  description: 'TiDB'
- name: 'product: trino'
  color: 5065c7
  description: 'Trino'
- name: 'product: valkey'
  color: 5065c7
  description: 'Valkey'
- name: 'product: yugabytedb'
  color: 5065c7
  description: 'YugabyteDB'
```
.github/release-please.yml (vendored, 12 lines changed)

```
@@ -38,4 +38,14 @@ extraFiles: [
"docs/en/how-to/connect-ide/neo4j_mcp.md",
"docs/en/how-to/connect-ide/sqlite_mcp.md",
"gemini-extension.json",
]
{
  "type": "json",
  "path": "server.json",
  "jsonpath": "$.version"
},
{
  "type": "json",
  "path": "server.json",
  "jsonpath": "$.packages[0].identifier"
},
]
```
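The two new `extraFiles` entries point release-please at version fields inside `server.json`. Roughly, these are the values a command like the following would print (jq is used here only for illustration; it assumes `server.json` sits at the repository root, as the config implies):

```bash
# Show the two server.json fields release-please now keeps in sync with the release version.
jq -r '.version, .packages[0].identifier' server.json
```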
.github/workflows/deploy_dev_docs.yaml (vendored, 2 lines changed)

```
@@ -40,7 +40,7 @@ jobs:
group: docs-deployment
cancel-in-progress: false
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

@@ -30,14 +30,14 @@ jobs:

steps:
- name: Checkout main branch (for latest templates and theme)
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
  uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    ref: 'main'
    submodules: 'recursive'
    fetch-depth: 0

- name: Checkout old content from tag into a temporary directory
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
  uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    ref: ${{ github.event.inputs.version_tag }}
    path: 'old_version_source' # Checkout into a temp subdir
```
.github/workflows/deploy_versioned_docs.yaml (vendored, 2 lines changed)

```
@@ -30,7 +30,7 @@ jobs:
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
  uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    ref: ${{ github.event.release.tag_name }}
```
.github/workflows/docs_deploy.yaml (vendored, 79 lines, file deleted)

```
@@ -1,79 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "docs"

permissions:
  contents: write

on:
  push:
    branches:
      - main
    paths:
      - 'docs/**'
      - 'github/workflows/docs**'
      - '.hugo/**'

  # Allow triggering manually.
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: .hugo
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

      - name: Setup Hugo
        uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
        with:
          hugo-version: "0.145.0"
          extended: true

      - name: Setup Node
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
        with:
          node-version: "22"

      - name: Cache dependencies
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-

      - run: npm ci
      - run: hugo --minify
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          # Do not delete previews on each production deploy.
          # CSS or JS changes will require manual clean-up.
          keep_files: true
          commit_message: "deploy: ${{ github.event.head_commit.message }}"
```
.github/workflows/docs_preview_clean.yaml (vendored, 2 lines changed)

```
@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    ref: versioned-gh-pages
```
.github/workflows/docs_preview_deploy.yaml (vendored, 2 lines changed)

```
@@ -49,7 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
  with:
    # Checkout the PR's HEAD commit (supports forks).
    ref: ${{ github.event.pull_request.head.sha }}
```
.github/workflows/lint.yaml (vendored, 4 lines changed)

```
@@ -55,7 +55,7 @@ jobs:
with:
  go-version: "1.25"
- name: Checkout code
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
  uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
  with:
    ref: ${{ github.event.pull_request.head.sha }}
    repository: ${{ github.event.pull_request.head.repo.full_name }}

@@ -69,4 +69,4 @@ jobs:
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
with:
  version: latest
  args: --timeout 4m
  args: --timeout 10m
```
.github/workflows/publish-mcp.yml (vendored, new file, 73 lines)

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Publish to MCP Registry

on:
  push:
    tags: ["v*"] # Triggers on version tags like v1.0.0
  # allow manual triggering with no inputs required
  workflow_dispatch:

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      id-token: write # Required for OIDC authentication
      contents: read

    steps:
      - name: Checkout code
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5

      - name: Wait for image in Artifact Registry
        shell: bash
        run: |
          MAX_ATTEMPTS=10
          VERSION=$(jq -r '.version' server.json)
          REGISTRY_URL="https://us-central1-docker.pkg.dev/v2/database-toolbox/toolbox/toolbox/manifests/${VERSION}"

          # initially sleep time to wait for the version release
          sleep 3m

          for i in $(seq 1 ${MAX_ATTEMPTS}); do
            echo "Attempt $i: Checking for image ${REGISTRY_URL}..."
            # Use curl to check the manifest header
            # Using -I to fetch headers only, -s silent, -f fail fast on errors.
            curl -Isf "${REGISTRY_URL}" > /dev/null

            if [ $? -eq 0 ]; then
              echo "✅ Image found! Continuing to next steps."
              exit 0
            else
              echo "❌ Image not found (likely 404 error) on attempt $i."
              if [ $i -lt ${MAX_ATTEMPTS} ]; then
                echo "Sleeping for 5 minutes before next attempt..."
                sleep 2m
              else
                echo "Maximum attempts reached. Image not found."
                exit 1
              fi
            fi
          done

      - name: Install MCP Publisher
        run: |
          curl -L "https://github.com/modelcontextprotocol/registry/releases/latest/download/mcp-publisher_$(uname -s | tr '[:upper:]' '[:lower:]')_$(uname -m | sed 's/x86_64/amd64/;s/aarch64/arm64/').tar.gz" | tar xz mcp-publisher

      - name: Login to MCP Registry
        run: ./mcp-publisher login github-oidc

      - name: Publish to MCP Registry
        run: ./mcp-publisher publish
```
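Because the workflow also declares `workflow_dispatch`, a publish run can be started by hand instead of waiting for a `v*` tag. A sketch with the GitHub CLI, assuming `gh` is authenticated against this repository:

```bash
# Manually trigger the MCP registry publish workflow by its name.
gh workflow run "Publish to MCP Registry"
```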
.github/workflows/sync-labels.yaml (vendored, 2 lines changed)

```
@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/tests.yaml (vendored, 7 lines changed)

```
@@ -62,7 +62,7 @@ jobs:
go-version: "1.24"

- name: Checkout code
  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
  uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
  with:
    ref: ${{ github.event.pull_request.head.sha }}
    repository: ${{ github.event.pull_request.head.repo.full_name }}

@@ -81,11 +81,12 @@ jobs:
run: |
  source_dir="./internal/sources/*"
  tool_dir="./internal/tools/*"
  prompt_dir="./internal/prompts/*"
  auth_dir="./internal/auth/*"
  int_test_dir="./tests/*"
  included_packages=$(go list ./... | grep -v -e "$source_dir" -e "$tool_dir" -e "$auth_dir" -e "$int_test_dir")
  included_packages=$(go list ./... | grep -v -e "$source_dir" -e "$tool_dir" -e "$prompt_dir" -e "$auth_dir" -e "$int_test_dir")
  go test -race -cover -coverprofile=coverage.out -v $included_packages
  go test -race -v ./internal/sources/... ./internal/tools/... ./internal/auth/...
  go test -race -v ./internal/sources/... ./internal/tools/... ./internal/prompts/... ./internal/auth/...

- name: Run tests without coverage
  if: ${{ runner.os != 'Linux' }}
```
```
@@ -51,6 +51,14 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown

[[params.versions]]
version = "v0.21.0"
url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"

[[params.versions]]
version = "v0.20.0"
url = "https://googleapis.github.io/genai-toolbox/v0.20.0/"

[[params.versions]]
version = "v0.19.1"
url = "https://googleapis.github.io/genai-toolbox/v0.19.1/"
```
CHANGELOG.md (32 lines changed)

```
@@ -1,5 +1,37 @@
# Changelog

## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)


### ⚠ BREAKING CHANGES

* **tools/spanner-list-tables:** Unmarshal `object_details` json string into map to make response have nested json ([#1894](https://github.com/googleapis/genai-toolbox/issues/1894)) ([446d62a](https://github.com/googleapis/genai-toolbox/commit/446d62acd995d5128f52e9db254dd1c7138227c6))


### Features

* **tools/postgres:** Add `long_running_transactions`, `list_locks` and `replication_stats` tools ([#1751](https://github.com/googleapis/genai-toolbox/issues/1751)) ([5abad5d](https://github.com/googleapis/genai-toolbox/commit/5abad5d56c6cc5ba86adc5253b948bf8230fa830))


### Bug Fixes

* **tools/alloydbgetinstance:** Remove parameter duplication ([#1993](https://github.com/googleapis/genai-toolbox/issues/1993)) ([0e269a1](https://github.com/googleapis/genai-toolbox/commit/0e269a1d125eed16a51ead27db4398e6e48cb948))
* **tools:** Check for query execution error for pgxpool.Pool ([#1969](https://github.com/googleapis/genai-toolbox/issues/1969)) ([2bff138](https://github.com/googleapis/genai-toolbox/commit/2bff1384a3570ef46bc03ebebc507923af261987))


## [0.20.0](https://github.com/googleapis/genai-toolbox/compare/v0.19.1...v0.20.0) (2025-11-14)


### Features

* Added prompt support for toolbox ([#1798](https://github.com/googleapis/genai-toolbox/issues/1798)) ([cd56ea4](https://github.com/googleapis/genai-toolbox/commit/cd56ea44fbdd149fcb92324e70ee36ac747635db))
* **source/alloydb, source/cloud-sql-postgres,source/cloud-sql-mysql,source/cloud-sql-mssql:** Use project from env for alloydb and cloud sql control plane tools ([#1588](https://github.com/googleapis/genai-toolbox/issues/1588)) ([12bdd95](https://github.com/googleapis/genai-toolbox/commit/12bdd954597e49d3ec6b247cc104584c5a4d1943))
* **source/mysql:** Set default host and port for MySQL source ([#1922](https://github.com/googleapis/genai-toolbox/issues/1922)) ([2c228ef](https://github.com/googleapis/genai-toolbox/commit/2c228ef4f2d4cb8dfc41e845466bfe3566d141a1))
* **source/Postgresql:** Set default host and port for Postgresql source ([#1927](https://github.com/googleapis/genai-toolbox/issues/1927)) ([7e6e88a](https://github.com/googleapis/genai-toolbox/commit/7e6e88a21f2b9b60e0d645cdde33a95892d31a04))
* **tool/looker-generate-embed-url:** Adding generate embed url tool ([#1877](https://github.com/googleapis/genai-toolbox/issues/1877)) ([ef63860](https://github.com/googleapis/genai-toolbox/commit/ef63860559798fbad54c1051d9f53bce42d66464))
* **tools/postgres:** Add `list_triggers`, `database_overview` tools for postgres ([#1912](https://github.com/googleapis/genai-toolbox/issues/1912)) ([a4c9287](https://github.com/googleapis/genai-toolbox/commit/a4c9287aecf848faa98d973a9ce5b13fa309a58e))
* **tools/postgres:** Add list_indexes, list_sequences tools for postgres ([#1765](https://github.com/googleapis/genai-toolbox/issues/1765)) ([897c63d](https://github.com/googleapis/genai-toolbox/commit/897c63dcea43226262d2062088c59f2d1068fca7))

## [0.19.1](https://github.com/googleapis/genai-toolbox/compare/v0.18.0...v0.19.1) (2025-11-07)
```
```
@@ -93,7 +93,8 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s

### Adding a New Tool

> [!NOTE]
> Please follow the tool naming convention detailed
> [here](./DEVELOPER.md#tool-naming-conventions).

We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

@@ -129,10 +130,10 @@ tools.

* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the
  `/tests/newdb/newdb_integration_test.go` that are **required** to be run as
  long as your code contains related features. Please check each test suites for
  the config defaults, if your source require test suites config updates, please
  refer to [config option](./tests/option.go):

1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
   tool's manifest.
```
26
DEVELOPER.md
26
DEVELOPER.md
@@ -255,18 +255,25 @@ Follow these steps to preview documentation changes locally using a Hugo server:
|
||||
There are 3 GHA workflows we use to achieve document versioning:
|
||||
|
||||
1. **Deploy In-development docs:**
|
||||
This workflow is run on every commit merged into the main branch. It deploys the built site to the `/dev/` subdirectory for the in-development documentation.
|
||||
This workflow is run on every commit merged into the main branch. It deploys
|
||||
the built site to the `/dev/` subdirectory for the in-development
|
||||
documentation.
|
||||
|
||||
1. **Deploy Versioned Docs:**
|
||||
When a new GitHub Release is published, it performs two deployments based on the new release tag.
|
||||
One to the new version subdirectory and one to the root directory of the versioned-gh-pages branch.
|
||||
When a new GitHub Release is published, it performs two deployments based on
the new release tag: one to the new version subdirectory and one to the root
directory of the versioned-gh-pages branch.
|
||||
|
||||
**Note:** Before the release PR from release-please is merged, add the newest version into the hugo.toml file.
|
||||
**Note:** Before the release PR from release-please is merged, add the
|
||||
newest version into the hugo.toml file.
|
||||
|
||||
1. **Deploy Previous Version Docs:**
|
||||
This is a manual workflow, started from the GitHub Actions UI.
|
||||
To rebuild and redeploy documentation for an already released version that were released before this new system was in place. This workflow can be started on the UI by providing the git version tag which you want to create the documentation for.
|
||||
The specific versioned subdirectory and the root docs are updated on the versioned-gh-pages branch.
|
||||
It rebuilds and redeploys documentation for an already released version that
was released before this new system was in place. The workflow can be
started from the UI by providing the git version tag for which you want to
create the documentation. The specific versioned subdirectory and the root
docs are updated on the versioned-gh-pages branch.
|
||||
|
||||
#### Contributors
|
||||
|
||||
@@ -337,7 +344,9 @@ for instructions on developing Toolbox SDKs.
|
||||
|
||||
Team `@googleapis/senseai-eco` has been set as
|
||||
[CODEOWNERS](.github/CODEOWNERS). The GitHub TeamSync tool is used to create
|
||||
this team from MDB Group, `senseai-eco`.
|
||||
this team from MDB Group, `senseai-eco`. Additionally, database-specific GitHub
|
||||
teams (e.g., `@googleapis/toolbox-alloydb`) have been created from MDB groups to
|
||||
manage code ownership and review for individual database products.
|
||||
|
||||
Team `@googleapis/toolbox-contributors` has write access to this repo. They
|
||||
can create branches and approve test runs. But they do not have the ability
|
||||
@@ -441,7 +450,8 @@ Trigger pull request tests for external contributors by:
|
||||
|
||||
## Repo Setup & Automation
|
||||
|
||||
* .github/blunderbuss.yml - Auto-assign issues and PRs from GitHub teams
|
||||
* .github/blunderbuss.yml - Auto-assign issues and PRs from GitHub teams. Use a
|
||||
product label to assign to a product-specific team member.
|
||||
* .github/renovate.json5 - Tooling for dependency updates. Dependabot is built
|
||||
into the GitHub repo for GitHub security warnings
|
||||
* go/github-issue-mirror - GitHub issues are automatically mirrored into buganizer
|
||||
|
||||
Dockerfile (28 lines changed)
@@ -11,10 +11,15 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Use the latest stable golang 1.x to compile to a binary
|
||||
FROM --platform=$BUILDPLATFORM golang:1 AS build
|
||||
|
||||
# Install Zig for CGO cross-compilation
|
||||
RUN apt-get update && apt-get install -y xz-utils
|
||||
RUN curl -fL "https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz" -o zig.tar.xz && \
|
||||
mkdir -p /zig && \
|
||||
tar -xf zig.tar.xz -C /zig --strip-components=1 && \
|
||||
rm zig.tar.xz
|
||||
|
||||
WORKDIR /go/src/genai-toolbox
|
||||
COPY . .
|
||||
|
||||
@@ -24,14 +29,27 @@ ARG BUILD_TYPE="container.dev"
|
||||
ARG COMMIT_SHA=""
|
||||
|
||||
RUN go get ./...
|
||||
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}"
|
||||
|
||||
RUN export ZIG_TARGET="" && \
|
||||
case "${TARGETARCH}" in \
|
||||
("amd64") ZIG_TARGET="x86_64-linux-gnu" ;; \
|
||||
("arm64") ZIG_TARGET="aarch64-linux-gnu" ;; \
|
||||
(*) echo "Unsupported architecture: ${TARGETARCH}" && exit 1 ;; \
|
||||
esac && \
|
||||
CGO_ENABLED=1 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
|
||||
CC="/zig/zig cc -target ${ZIG_TARGET}" \
|
||||
CXX="/zig/zig c++ -target ${ZIG_TARGET}" \
|
||||
go build \
|
||||
-ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}" \
|
||||
-o genai-toolbox .
|
||||
|
||||
# Final Stage
|
||||
FROM gcr.io/distroless/static:nonroot
|
||||
FROM gcr.io/distroless/cc-debian12:nonroot
|
||||
|
||||
WORKDIR /app
|
||||
COPY --from=build --chown=nonroot /go/src/genai-toolbox/genai-toolbox /toolbox
|
||||
USER nonroot
|
||||
|
||||
LABEL io.modelcontextprotocol.server.name="io.github.googleapis/genai-toolbox"
|
||||
|
||||
ENTRYPOINT ["/toolbox"]
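With the build stage now cross-compiling CGO code via Zig, a multi-architecture image can be produced in a single invocation. A minimal sketch, assuming `docker buildx` is available; the registry and tag are illustrative:

```bash
# Illustrative only: adjust the tag and registry for your environment.
# buildx sets BUILDPLATFORM, TARGETOS, and TARGETARCH for each platform automatically.
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --build-arg BUILD_TYPE=container.dev \
  --build-arg COMMIT_SHA="$(git rev-parse --short HEAD)" \
  -t example-registry/genai-toolbox:dev \
  --push .
```

The switch from `distroless/static` to `distroless/cc-debian12` keeps the C runtime that the CGO-enabled binary now links against.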
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
This document helps you find and install the right Gemini CLI extension to interact with your databases.
|
||||
This document helps you find and install the right Gemini CLI extension to
|
||||
interact with your databases.
|
||||
|
||||
## How to Install an Extension
|
||||
|
||||
To install any of the extensions listed below, use the `gemini extensions install` command followed by the extension's GitHub repository URL.
|
||||
To install any of the extensions listed below, use the `gemini extensions
|
||||
install` command followed by the extension's GitHub repository URL.
|
||||
|
||||
For complete instructions on finding, installing, and managing extensions, please see the [official Gemini CLI extensions documentation](https://github.com/google-gemini/gemini-cli/blob/main/docs/extensions/index.md).
|
||||
For complete instructions on finding, installing, and managing extensions,
|
||||
please see the [official Gemini CLI extensions
|
||||
documentation](https://github.com/google-gemini/gemini-cli/blob/main/docs/extensions/index.md).
|
||||
|
||||
**Example Installation Command:**
|
||||
|
||||
@@ -13,46 +17,63 @@ gemini extensions install https://github.com/gemini-cli-extensions/EXTENSION_NAM
|
||||
```
|
||||
|
||||
Make sure the user knows:
|
||||
|
||||
* These commands are not supported from within the CLI
|
||||
* These commands will only be reflected in active CLI sessions on restart
|
||||
* Extensions require Application Default Credentials in your environment. See [Set up ADC for a local development environment](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment) to learn how you can provide either your user credentials or service account credentials to ADC in a local development environment.
|
||||
* Most extensions require you to set environment variables to connect to a database. If there is a link provided for the configuration, fetch the web page and return the configuration.
|
||||
* Extensions require Application Default Credentials in your environment. See
|
||||
[Set up ADC for a local development
|
||||
environment](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
|
||||
to learn how you can provide either your user credentials or service account
|
||||
credentials to ADC in a local development environment (a minimal example follows this list).
|
||||
* Most extensions require you to set environment variables to connect to a
|
||||
database. If there is a link provided for the configuration, fetch the web
|
||||
page and return the configuration.
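
As noted in the ADC bullet above, user-credential ADC can typically be provided on a local machine with the gcloud CLI (assuming it is installed and you have access to the relevant project):

```bash
# Writes Application Default Credentials for your user account to the
# well-known ADC location used by Google client libraries.
gcloud auth application-default login
```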
|
||||
|
||||
-----
|
||||
|
||||
## Find Your Database Extension
|
||||
|
||||
Find your database or service in the list below to get the correct installation command.
|
||||
Find your database or service in the list below to get the correct installation
|
||||
command.
|
||||
|
||||
**Note on Observability:** Extensions with `-observability` in their name are designed to help you understand the health and performance of your database instances, often by analyzing metrics and logs.
|
||||
**Note on Observability:** Extensions with `-observability` in their name are
|
||||
designed to help you understand the health and performance of your database
|
||||
instances, often by analyzing metrics and logs.
|
||||
|
||||
### Google Cloud Managed Databases
|
||||
|
||||
#### BigQuery
|
||||
|
||||
* For data analytics and querying:
|
||||
* For data analytics and querying:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/bigquery-data-analytics
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/bigquery-data-analytics/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/bigquery-data-analytics/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* For conversational analytics (using natural language):
|
||||
|
||||
* For conversational analytics (using natural language):
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/bigquery-conversational-analytics
|
||||
```
|
||||
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/bigquery-conversational-analytics/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
#### Cloud SQL for MySQL
|
||||
|
||||
* Main Extension:
|
||||
* Main Extension:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-mysql
|
||||
```
|
||||
Configuration: https://github.com/gemini-cli-extensions/cloud-sql-mysql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/cloud-sql-mysql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-mysql-observability
|
||||
```
|
||||
@@ -61,129 +82,166 @@ Find your database or service in the list below to get the correct installation
|
||||
|
||||
#### Cloud SQL for PostgreSQL
|
||||
|
||||
* Main Extension:
|
||||
* Main Extension:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-postgresql
|
||||
```
|
||||
Configuration: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/cloud-sql-postgresql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-postgresql-observability
|
||||
```
|
||||
|
||||
If you are looking for other PostgreSQL options, consider the `postgres` extension for self-hosted instances, or the `alloydb` extension for AlloyDB for PostgreSQL.
|
||||
If you are looking for other PostgreSQL options, consider the `postgres`
|
||||
extension for self-hosted instances, or the `alloydb` extension for AlloyDB
|
||||
for PostgreSQL.
|
||||
|
||||
#### Cloud SQL for SQL Server
|
||||
|
||||
* Main Extension:
|
||||
* Main Extension:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-sqlserver
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/cloud-sql-sqlserver/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/cloud-sql-sqlserver/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/cloud-sql-sqlserver-observability
|
||||
```
|
||||
|
||||
If you are looking for self-hosted SQL Server, consider the `sql-server` extension.
|
||||
If you are looking for self-hosted SQL Server, consider the `sql-server`
|
||||
extension.
|
||||
|
||||
#### AlloyDB for PostgreSQL
|
||||
|
||||
* Main Extension:
|
||||
* Main Extension:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/alloydb
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/alloydb/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/alloydb/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
* Observability:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/alloydb-observability
|
||||
```
|
||||
|
||||
If you are looking for other PostgreSQL options, consider the `postgres` extension for self-hosted instances, or the `cloud-sql-postgresql` extension for Cloud SQL for PostgreSQL.
|
||||
If you are looking for other PostgreSQL options, consider the `postgres`
|
||||
extension for self-hosted instances, or the `cloud-sql-postgresql` extension
|
||||
for Cloud SQL for PostgreSQL.
|
||||
|
||||
#### Spanner
|
||||
|
||||
* For querying Spanner databases:
|
||||
* For querying Spanner databases:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/spanner
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/spanner/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/spanner/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
#### Firestore
|
||||
|
||||
* For querying Firestore in Native Mode:
|
||||
* For querying Firestore in Native Mode:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/firestore-native
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/firestore-native/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/firestore-native/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
### Other Google Cloud Data Services
|
||||
|
||||
#### Dataplex
|
||||
|
||||
* For interacting with Dataplex data lakes and assets:
|
||||
* For interacting with Dataplex data lakes and assets:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/dataplex
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/dataplex/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/dataplex/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
#### Looker
|
||||
|
||||
* For querying Looker instances:
|
||||
* For querying Looker instances:
|
||||
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/looker
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/looker/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/looker/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
### Other Database Engines
|
||||
|
||||
These extensions are for connecting to database instances not managed by Cloud SQL (e.g., self-hosted on-prem, on a VM, or in another cloud).
|
||||
These extensions are for connecting to database instances not managed by Cloud
|
||||
SQL (e.g., self-hosted on-prem, on a VM, or in another cloud).
|
||||
|
||||
* MySQL:
|
||||
|
||||
* MySQL:
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/mysql
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/mysql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
If you are looking for Google Cloud managed MySQL, consider the `cloud-sql-mysql` extension.
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/mysql/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
If you are looking for Google Cloud managed MySQL, consider the
|
||||
`cloud-sql-mysql` extension.
|
||||
|
||||
* PostgreSQL:
|
||||
|
||||
* PostgreSQL:
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/postgres
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/postgres/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
If you are looking for Google Cloud managed PostgreSQL, consider the `cloud-sql-postgresql` or `alloydb` extensions.
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/postgres/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
If you are looking for Google Cloud managed PostgreSQL, consider the
|
||||
`cloud-sql-postgresql` or `alloydb` extensions.
|
||||
|
||||
* SQL Server:
|
||||
|
||||
* SQL Server:
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/sql-server
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/sql-server/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/sql-server/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
If you are looking for Google Cloud managed SQL Server, consider the `cloud-sql-sqlserver` extension.
|
||||
If you are looking for Google Cloud managed SQL Server, consider the
|
||||
`cloud-sql-sqlserver` extension.
|
||||
|
||||
### Custom Tools
|
||||
|
||||
#### MCP Toolbox
|
||||
|
||||
* For connecting to MCP Toolbox servers:
|
||||
* For connecting to MCP Toolbox servers:
|
||||
|
||||
This extension can be used with any Google Cloud database to build custom
|
||||
tools. For more information, see the [MCP Toolbox
|
||||
documentation](https://googleapis.github.io/genai-toolbox/getting-started/introduction/).
|
||||
|
||||
This extension can be used with any Google Cloud database to build custom tools. For more information, see the [MCP Toolbox documentation](https://googleapis.github.io/genai-toolbox/getting-started/introduction/).
|
||||
```bash
|
||||
gemini extensions install https://github.com/gemini-cli-extensions/mcp-toolbox
|
||||
```
|
||||
|
||||
Configuration: https://github.com/gemini-cli-extensions/mcp-toolbox/tree/main?tab=readme-ov-file#configuration
|
||||
Configuration:
|
||||
https://github.com/gemini-cli-extensions/mcp-toolbox/tree/main?tab=readme-ov-file#configuration
|
||||
|
||||
README.md (38 lines changed)
@@ -39,6 +39,7 @@ documentation](https://googleapis.github.io/genai-toolbox/).
|
||||
- [Sources](#sources)
|
||||
- [Tools](#tools)
|
||||
- [Toolsets](#toolsets)
|
||||
- [Prompts](#prompts)
|
||||
- [Versioning](#versioning)
|
||||
- [Pre-1.0.0 Versioning](#pre-100-versioning)
|
||||
- [Post-1.0.0 Versioning](#post-100-versioning)
|
||||
@@ -124,7 +125,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.19.1
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -137,7 +138,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.19.1
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -150,7 +151,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.19.1
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -162,9 +163,9 @@ To install Toolbox as a binary:
|
||||
> To install Toolbox as a binary on Windows (AMD64):
|
||||
>
|
||||
> ```powershell
|
||||
> # see releases page for other versions
|
||||
> $VERSION = "0.19.1"
|
||||
> Invoke-WebRequest -Uri "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" -OutFile "toolbox.exe"
|
||||
> :: see releases page for other versions
|
||||
> set VERSION=0.21.0
|
||||
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||
> ```
|
||||
>
|
||||
> </details>
|
||||
@@ -176,7 +177,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.19.1
|
||||
export VERSION=0.21.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -200,7 +201,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.19.1
|
||||
go install github.com/googleapis/genai-toolbox@v0.21.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -803,8 +804,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README][toolbox-core-go-readme].
|
||||
|
||||
[toolbox-go]: https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core
|
||||
[toolbox-core-go-readme]: https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md
|
||||
|
||||
</details>
|
||||
</details>
|
||||
@@ -932,6 +931,25 @@ all_tools = client.load_toolset()
|
||||
my_second_toolset = client.load_toolset("my_second_toolset")
|
||||
```
|
||||
|
||||
### Prompts
|
||||
|
||||
The `prompts` section of a `tools.yaml` defines prompts that can be used for
|
||||
interactions with LLMs.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
code_review:
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
```
|
||||
|
||||
For more details on configuring prompts, see the
[Prompts documentation](https://googleapis.github.io/genai-toolbox/resources/prompts).
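
Messages may also carry an explicit `role` (the parser tests in this change exercise `role: user`). A second minimal sketch using only fields shown elsewhere in this change; the prompt and argument names are illustrative:

```yaml
prompts:
  summarize_table:
    description: "Asks the LLM to summarize the contents of a table."
    messages:
      - role: user
        content: "Summarize the most important trends in the {{.table}} table."
    arguments:
      - name: "table"
        description: "The table to summarize"
```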
|
||||
|
||||
## Versioning
|
||||
|
||||
This project uses [semantic versioning](https://semver.org/) (`MAJOR.MINOR.PATCH`).
|
||||
|
||||
cmd/root.go (46 lines changed)
@@ -35,12 +35,16 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
// Import prompt packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
|
||||
@@ -94,6 +98,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
@@ -118,6 +123,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
|
||||
@@ -172,13 +178,20 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
|
||||
@@ -360,12 +373,13 @@ type ToolsFile struct {
|
||||
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
|
||||
Prompts server.PromptConfigs `yaml:"prompts"`
|
||||
}
|
||||
|
||||
// parseEnv replaces environment variables ${ENV_NAME} with their values.
|
||||
// It also supports ${ENV_NAME:default_value}.
|
||||
func parseEnv(input string) (string, error) {
|
||||
re := regexp.MustCompile(`\$\{(\w+)(:(\w*))?\}`)
|
||||
re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)
|
||||
|
||||
var err error
|
||||
output := re.ReplaceAllStringFunc(input, func(match string) string {
|
||||
@@ -376,7 +390,7 @@ func parseEnv(input string) (string, error) {
|
||||
if value, found := os.LookupEnv(variableName); found {
|
||||
return value
|
||||
}
|
||||
if parts[2] != "" {
|
||||
if len(parts) >= 4 && parts[2] != "" {
|
||||
return parts[3]
|
||||
}
|
||||
err = fmt.Errorf("environment variable not found: %q", variableName)
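
The widened `[^}]*` group matters for defaults containing characters such as `:` or `/`, which the previous `\w*` group rejected (leaving the placeholder untouched). A standalone sketch of the new behavior, with illustrative variable names; the real `parseEnv` additionally consults `os.LookupEnv` first:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the updated parseEnv.
	re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)

	input := "host=${DB_HOST:localhost:5432} url=${API_URL:https://example.com}"
	out := re.ReplaceAllStringFunc(input, func(match string) string {
		parts := re.FindStringSubmatch(match)
		// parts[1] = variable name, parts[3] = default value (may be empty).
		if len(parts) >= 4 && parts[2] != "" {
			return parts[3]
		}
		return match
	})
	fmt.Println(out) // host=localhost:5432 url=https://example.com
}
```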
|
||||
@@ -412,6 +426,7 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
||||
AuthServices: make(server.AuthServiceConfigs),
|
||||
Tools: make(server.ToolConfigs),
|
||||
Toolsets: make(server.ToolsetConfigs),
|
||||
Prompts: make(server.PromptConfigs),
|
||||
}
|
||||
|
||||
var conflicts []string
|
||||
@@ -461,11 +476,20 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
||||
merged.Toolsets[name] = toolset
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge prompts
|
||||
for name, prompt := range file.Prompts {
|
||||
if _, exists := merged.Prompts[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Prompts[name] = prompt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If conflicts were detected, return an error
|
||||
if len(conflicts) > 0 {
|
||||
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, and toolset has a unique name across all files", strings.Join(conflicts, "\n - "))
|
||||
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
|
||||
}
|
||||
|
||||
return merged, nil
|
||||
@@ -539,14 +563,14 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
|
||||
panic(err)
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := validateReloadEdits(ctx, toolsFile)
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap)
|
||||
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -554,7 +578,7 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
|
||||
// validateReloadEdits checks that the reloaded tools file configs can be initialized without failing
|
||||
func validateReloadEdits(
|
||||
ctx context.Context, toolsFile ToolsFile,
|
||||
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, error,
|
||||
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
|
||||
) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
@@ -577,16 +601,17 @@ func validateReloadEdits(
|
||||
AuthServiceConfigs: toolsFile.AuthServices,
|
||||
ToolConfigs: toolsFile.Tools,
|
||||
ToolsetConfigs: toolsFile.Toolsets,
|
||||
PromptConfigs: toolsFile.Prompts,
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return nil, nil, nil, nil, err
|
||||
return nil, nil, nil, nil, nil, nil, err
|
||||
}
|
||||
|
||||
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
|
||||
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
|
||||
}
|
||||
|
||||
// watchChanges checks for changes in the provided yaml tools file(s) or folder.
|
||||
@@ -877,7 +902,8 @@ func run(cmd *Command) error {
|
||||
}
|
||||
}
|
||||
|
||||
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets
|
||||
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs, cmd.cfg.PromptConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts
|
||||
|
||||
authSourceConfigs := toolsFile.AuthSources
|
||||
if authSourceConfigs != nil {
|
||||
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
|
||||
|
||||
cmd/root_test.go (212 lines changed)
@@ -34,6 +34,8 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts/custom"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
cloudsqlpgsrc "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
@@ -43,6 +45,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@@ -513,8 +516,8 @@ func TestParseToolFile(t *testing.T) {
|
||||
Source: "my-pg-instance",
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
Parameters: []tools.Parameter{
|
||||
tools.NewStringParameter("country", "some description"),
|
||||
Parameters: []parameters.Parameter{
|
||||
parameters.NewStringParameter("country", "some description"),
|
||||
},
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
@@ -525,6 +528,38 @@ func TestParseToolFile(t *testing.T) {
|
||||
ToolNames: []string{"example_tool"},
|
||||
},
|
||||
},
|
||||
Prompts: nil,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "with prompts example",
|
||||
in: `
|
||||
prompts:
|
||||
my-prompt:
|
||||
description: A prompt template for data analysis.
|
||||
arguments:
|
||||
- name: country
|
||||
description: The country to analyze.
|
||||
messages:
|
||||
- content: Analyze the data for {{.country}}.
|
||||
`,
|
||||
wantToolsFile: ToolsFile{
|
||||
Sources: nil,
|
||||
AuthServices: nil,
|
||||
Tools: nil,
|
||||
Toolsets: nil,
|
||||
Prompts: server.PromptConfigs{
|
||||
"my-prompt": &custom.Config{
|
||||
Name: "my-prompt",
|
||||
Description: "A prompt template for data analysis.",
|
||||
Arguments: prompts.Arguments{
|
||||
{Parameter: parameters.NewStringParameter("country", "The country to analyze.")},
|
||||
},
|
||||
Messages: []prompts.Message{
|
||||
{Role: "user", Content: "Analyze the data for {{.country}}."},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -544,7 +579,10 @@ func TestParseToolFile(t *testing.T) {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
t.Fatalf("incorrect toolsets parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" {
|
||||
t.Fatalf("incorrect prompts parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -645,10 +683,10 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
AuthRequired: []string{},
|
||||
Parameters: []tools.Parameter{
|
||||
tools.NewStringParameter("country", "some description"),
|
||||
tools.NewIntParameterWithAuth("id", "user id", []tools.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
tools.NewStringParameterWithAuth("email", "user email", []tools.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
Parameters: []parameters.Parameter{
|
||||
parameters.NewStringParameter("country", "some description"),
|
||||
parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -658,6 +696,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
ToolNames: []string{"example_tool"},
|
||||
},
|
||||
},
|
||||
Prompts: nil,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -744,10 +783,10 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
AuthRequired: []string{},
|
||||
Parameters: []tools.Parameter{
|
||||
tools.NewStringParameter("country", "some description"),
|
||||
tools.NewIntParameterWithAuth("id", "user id", []tools.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
tools.NewStringParameterWithAuth("email", "user email", []tools.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
Parameters: []parameters.Parameter{
|
||||
parameters.NewStringParameter("country", "some description"),
|
||||
parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -757,6 +796,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
ToolNames: []string{"example_tool"},
|
||||
},
|
||||
},
|
||||
Prompts: nil,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -845,10 +885,10 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
AuthRequired: []string{"my-google-service"},
|
||||
Parameters: []tools.Parameter{
|
||||
tools.NewStringParameter("country", "some description"),
|
||||
tools.NewIntParameterWithAuth("id", "user id", []tools.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
tools.NewStringParameterWithAuth("email", "user email", []tools.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
Parameters: []parameters.Parameter{
|
||||
parameters.NewStringParameter("country", "some description"),
|
||||
parameters.NewIntParameterWithAuth("id", "user id", []parameters.ParamAuthService{{Name: "my-google-service", Field: "user_id"}}),
|
||||
parameters.NewStringParameterWithAuth("email", "user email", []parameters.ParamAuthService{{Name: "my-google-service", Field: "email"}, {Name: "other-google-service", Field: "other_email"}}),
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -858,6 +898,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
ToolNames: []string{"example_tool"},
|
||||
},
|
||||
},
|
||||
Prompts: nil,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -877,7 +918,10 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
t.Fatalf("incorrect toolsets parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" {
|
||||
t.Fatalf("incorrect prompts parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -894,6 +938,8 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
t.Setenv("cat_string", "cat")
|
||||
t.Setenv("food_string", "food")
|
||||
t.Setenv("TestHeader", "ACTUAL_HEADER")
|
||||
t.Setenv("prompt_name", "ACTUAL_PROMPT_NAME")
|
||||
t.Setenv("prompt_content", "ACTUAL_CONTENT")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
@@ -967,6 +1013,14 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
toolsets:
|
||||
${toolset_name}:
|
||||
- example_tool
|
||||
|
||||
|
||||
prompts:
|
||||
${prompt_name}:
|
||||
description: A test prompt for {{.name}}.
|
||||
messages:
|
||||
- role: user
|
||||
content: ${prompt_content}
|
||||
`,
|
||||
wantToolsFile: ToolsFile{
|
||||
Sources: server.SourceConfigs{
|
||||
@@ -1000,9 +1054,9 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
Path: "search?name=alice&pet=cat",
|
||||
Description: "some description",
|
||||
AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
|
||||
QueryParams: []tools.Parameter{
|
||||
tools.NewStringParameterWithAuth("country", "some description",
|
||||
[]tools.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"},
|
||||
QueryParams: []parameters.Parameter{
|
||||
parameters.NewStringParameterWithAuth("country", "some description",
|
||||
[]parameters.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"},
|
||||
{Name: "other-auth-service", Field: "user_id"}}),
|
||||
},
|
||||
RequestBody: `{
|
||||
@@ -1012,9 +1066,9 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
"other": "$OTHER"
|
||||
}
|
||||
`,
|
||||
BodyParams: []tools.Parameter{tools.NewIntParameter("age", "age num"), tools.NewStringParameter("city", "city string")},
|
||||
BodyParams: []parameters.Parameter{parameters.NewIntParameter("age", "age num"), parameters.NewStringParameter("city", "city string")},
|
||||
Headers: map[string]string{"Authorization": "API_KEY", "Content-Type": "application/json"},
|
||||
HeaderParams: []tools.Parameter{tools.NewStringParameter("Language", "language string")},
|
||||
HeaderParams: []parameters.Parameter{parameters.NewStringParameter("Language", "language string")},
|
||||
},
|
||||
},
|
||||
Toolsets: server.ToolsetConfigs{
|
||||
@@ -1023,6 +1077,19 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
ToolNames: []string{"example_tool"},
|
||||
},
|
||||
},
|
||||
Prompts: server.PromptConfigs{
|
||||
"ACTUAL_PROMPT_NAME": &custom.Config{
|
||||
Name: "ACTUAL_PROMPT_NAME",
|
||||
Description: "A test prompt for {{.name}}.",
|
||||
Messages: []prompts.Message{
|
||||
{
|
||||
Role: "user",
|
||||
Content: "ACTUAL_CONTENT",
|
||||
},
|
||||
},
|
||||
Arguments: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1042,11 +1109,13 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Toolsets, toolsFile.Toolsets); diff != "" {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
t.Fatalf("incorrect toolsets parse: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolsFile.Prompts, toolsFile.Prompts); diff != "" {
|
||||
t.Fatalf("incorrect prompts parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// normalizeFilepaths is a helper function to allow same filepath formats for Mac and Windows.
|
||||
@@ -1379,7 +1448,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_admin_tools",
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1409,7 +1478,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1439,7 +1508,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1539,7 +1608,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1662,6 +1731,10 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
if diff := cmp.Diff(tc.wantToolset, toolsFile.Toolsets); diff != "" {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
}
|
||||
// Prebuilt configs do not have prompts, so assert empty maps.
|
||||
if len(toolsFile.Prompts) != 0 {
|
||||
t.Fatalf("expected empty prompts map for prebuilt config, got: %v", toolsFile.Prompts)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1734,3 +1807,88 @@ func TestFileLoadingErrors(t *testing.T) {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestMergeToolsFiles(t *testing.T) {
|
||||
file1 := ToolsFile{
|
||||
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
|
||||
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}},
|
||||
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}},
|
||||
}
|
||||
file2 := ToolsFile{
|
||||
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
|
||||
Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}},
|
||||
Toolsets: server.ToolsetConfigs{"set2": tools.ToolsetConfig{Name: "set2"}},
|
||||
}
|
||||
fileWithConflicts := ToolsFile{
|
||||
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
|
||||
Tools: server.ToolConfigs{"tool2": http.Config{Name: "tool2"}},
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
files []ToolsFile
|
||||
want ToolsFile
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "merge two distinct files",
|
||||
files: []ToolsFile{file1, file2},
|
||||
want: ToolsFile{
|
||||
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
|
||||
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
|
||||
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}},
|
||||
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}},
|
||||
Prompts: server.PromptConfigs{},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "merge with conflicts",
|
||||
files: []ToolsFile{file1, file2, fileWithConflicts},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "merge single file",
|
||||
files: []ToolsFile{file1},
|
||||
want: ToolsFile{
|
||||
Sources: file1.Sources,
|
||||
AuthServices: make(server.AuthServiceConfigs),
|
||||
Tools: file1.Tools,
|
||||
Toolsets: file1.Toolsets,
|
||||
Prompts: server.PromptConfigs{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "merge empty list",
|
||||
files: []ToolsFile{},
|
||||
want: ToolsFile{
|
||||
Sources: make(server.SourceConfigs),
|
||||
AuthServices: make(server.AuthServiceConfigs),
|
||||
Tools: make(server.ToolConfigs),
|
||||
Toolsets: make(server.ToolsetConfigs),
|
||||
Prompts: server.PromptConfigs{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got, err := mergeToolsFiles(tc.files...)
|
||||
if (err != nil) != tc.wantErr {
|
||||
t.Fatalf("mergeToolsFiles() error = %v, wantErr %v", err, tc.wantErr)
|
||||
}
|
||||
if !tc.wantErr {
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Errorf("mergeToolsFiles() mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
} else {
|
||||
if err == nil {
|
||||
t.Fatal("expected an error for conflicting files but got none")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "resource conflicts detected") {
|
||||
t.Errorf("expected conflict error, but got: %v", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.19.1
|
||||
0.21.0
|
||||
|
||||
docs/ALLOYDBADMIN_README.md (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
# AlloyDB for PostgreSQL Admin MCP Server
|
||||
|
||||
The AlloyDB Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud AlloyDB for PostgreSQL resources. It supports full lifecycle control, from creating clusters and instances to exploring schemas and running queries.
|
||||
|
||||
## Features
|
||||
|
||||
An editor configured to use the AlloyDB MCP server can use its AI capabilities to help you:
|
||||
|
||||
* **Provision & Manage Infrastructure**: Create and manage AlloyDB clusters, instances, and users
|
||||
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* A Google Cloud project with the **AlloyDB API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* AlloyDB Admin (`roles/alloydb.admin`) (for managing infrastructure)
|
||||
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
|
||||
|
||||
### Configuration
|
||||
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL Admin", and click "Install".
|
||||
|
||||
## Usage
|
||||
|
||||
Once configured, the MCP server will automatically provide AlloyDB capabilities to your AI assistant. You can:
|
||||
|
||||
* "Create a new AlloyDB cluster named 'e-commerce-prod' in the 'my-gcp-project' project."
|
||||
* "Add a read-only instance to my 'e-commerce-prod' cluster."
|
||||
* "Create a new user named 'analyst' with read access to all tables."
|
||||
|
||||
## Server Capabilities
|
||||
|
||||
The AlloyDB MCP server provides the following tools:
|
||||
|
||||
| Tool Name | Description |
|
||||
|:---------------------|:-------------------------------------------------------------------|
|
||||
| `create_cluster` | Create an AlloyDB cluster. |
|
||||
| `create_instance` | Create an AlloyDB instance (PRIMARY, READ-POOL, or SECONDARY). |
|
||||
| `create_user` | Create ALLOYDB-BUILT-IN or IAM-based users for an AlloyDB cluster. |
|
||||
| `get_cluster` | Get details about an AlloyDB cluster. |
|
||||
| `get_instance` | Get details about an AlloyDB instance. |
|
||||
| `get_user` | Get details about a user in an AlloyDB cluster. |
|
||||
| `list_clusters` | List clusters in a given project and location. |
|
||||
| `list_instances` | List instances in a given project and location. |
|
||||
| `list_users` | List users in a given project and location. |
|
||||
| `wait_for_operation` | Poll the operations API until the operation is done. |
|
||||
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [AlloyDB for PostgreSQL documentation](https://cloud.google.com/alloydb/docs).
|
||||
docs/ALLOYDBPG_README.md (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
# AlloyDB for PostgreSQL MCP Server
|
||||
|
||||
The AlloyDB Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud AlloyDB for PostgreSQL resources. It supports full lifecycle control, from exploring schemas and running queries to monitoring your database.
|
||||
|
||||
## Features
|
||||
|
||||
An editor configured to use the AlloyDB MCP server can use its AI capabilities to help you:
|
||||
|
||||
- **Explore Schemas and Data** - List tables, get table details, and view data
|
||||
- **Execute SQL** - Run SQL queries directly from your editor
|
||||
- **Monitor Performance** - View active queries, query plans, and other performance metrics (via observability tools)
|
||||
- **Manage Extensions** - List available and installed PostgreSQL extensions
|
||||
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* A Google Cloud project with the **AlloyDB API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* AlloyDB Client (`roles/alloydb.client`) (for connecting and querying)
|
||||
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
|
||||
|
||||
### Configuration
|
||||
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* AlloyDB Project ID: The GCP project ID.
|
||||
* AlloyDB Region: The region of your AlloyDB instance.
|
||||
* AlloyDB Cluster ID: The ID of your AlloyDB cluster.
|
||||
* AlloyDB Instance ID: The ID of your AlloyDB instance.
|
||||
* AlloyDB Database Name: The name of the database.
|
||||
* AlloyDB Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
|
||||
* AlloyDB Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
|
||||
* AlloyDB IP Type: (Optional) The IP type i.e. “Public” or “Private”. Defaults to "Public" if unspecified.
|
||||
|
||||
> [!NOTE]
|
||||
> If your AlloyDB instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
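
The configuration values above correspond to the prebuilt AlloyDB for PostgreSQL tools that ship with MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt alloydb-postgres` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt AlloyDB for PostgreSQL tools.
export ALLOYDB_POSTGRES_PROJECT="my-gcp-project"
export ALLOYDB_POSTGRES_REGION="us-central1"
export ALLOYDB_POSTGRES_CLUSTER="my-cluster"
export ALLOYDB_POSTGRES_INSTANCE="my-instance"
export ALLOYDB_POSTGRES_DATABASE="postgres"
# ALLOYDB_POSTGRES_USER / ALLOYDB_POSTGRES_PASSWORD may be omitted to use IAM authentication.

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt alloydb-postgres --stdio
```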

## Usage

Once configured, the MCP server will automatically provide AlloyDB capabilities to your AI assistant. You can:

* "Show me all tables in the 'orders' database."
* "What are the columns in the 'products' table?"
* "How many orders were placed in the last 30 days?"

## Server Capabilities

The AlloyDB MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `list_tables` | Lists detailed schema information for user-created tables. |
| `execute_sql` | Executes a SQL query. |
| `list_active_queries` | List currently running queries. |
| `list_available_extensions` | List available extensions for installation. |
| `list_installed_extensions` | List installed extensions. |
| `get_query_plan` | Get query plan for a SQL statement. |
| `list_autovacuum_configurations` | List autovacuum configurations and their values. |
| `list_memory_configurations` | List memory configurations and their values. |
| `list_top_bloated_tables` | List top bloated tables. |
| `list_replication_slots` | List replication slots. |
| `list_invalid_indexes` | List invalid indexes. |

## Documentation

For more information, visit the [AlloyDB for PostgreSQL documentation](https://cloud.google.com/alloydb/docs).

docs/BIGQUERY_README.md (Normal file, 96 lines)
@@ -0,0 +1,96 @@

# BigQuery MCP Server

The BigQuery Model Context Protocol (MCP) Server enables AI-powered development tools to connect to your BigQuery datasets, interact with your data, and generate insights using natural language commands.

## Features

An editor configured to use the BigQuery MCP server can use its AI capabilities to help you:

- **Natural Language to Data Analytics:** Easily find required BigQuery tables and ask analytical questions in plain English.
- **Seamless Workflow:** Stay within your editor, eliminating the need to constantly switch to the GCP console for generating analytical insights.
- **Run Advanced Analytics:** Generate forecasts and perform contribution analysis using built-in advanced tools.

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **BigQuery API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * BigQuery User (`roles/bigquery.user`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "BigQuery", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * BigQuery Project ID: The GCP project ID.
    * BigQuery Location: (Optional) The location of your BigQuery dataset (e.g. "US", "EU").
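
These values correspond to the prebuilt BigQuery tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt bigquery` configuration and the `BIGQUERY_PROJECT` environment variable; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable name for the prebuilt BigQuery tools.
export BIGQUERY_PROJECT="my-gcp-project"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt bigquery --stdio
```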

## Usage

Once configured, the MCP server will automatically provide BigQuery capabilities to your AI assistant. You can:

* **Find Data:**

    * "Find tables related to PyPi downloads"
    * "Find tables related to Google analytics data in the dataset bigquery-public-data"

* **Generate Analytics and Insights:**

    * "Using bigquery-public-data.pypi.file_downloads show me the top 10 downloaded pypi packages this month."
    * "Using bigquery-public-data.pypi.file_downloads can you forecast downloads for the last four months of 2025 for package urllib3?"

## Server Capabilities

The BigQuery MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `execute_sql` | Executes a SQL query. |
| `forecast` | Forecast time series data. |
| `get_dataset_info` | Get dataset metadata. |
| `get_table_info` | Get table metadata. |
| `list_dataset_ids` | Lists dataset IDs in the database. |
| `list_table_ids` | Lists table IDs in the database. |
| `analyze_contribution` | Perform contribution analysis, also called key driver analysis. |
| `search_catalog` | Search for tables based on the provided query. |

## Documentation

For more information, visit the [BigQuery documentation](https://cloud.google.com/bigquery/docs).

docs/CLOUDSQLMSSQLADMIN_README.md (Normal file, 83 lines)
@@ -0,0 +1,83 @@

# Cloud SQL for SQL Server Admin MCP Server

The Cloud SQL for SQL Server Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for SQL Server databases. It supports connecting to instances, exploring schemas, and running queries.

## Features

An editor configured to use the Cloud SQL for SQL Server MCP server can use its AI capabilities to help you:

- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Viewer (`roles/cloudsql.viewer`)
    * Cloud SQL Admin (`roles/cloudsql.admin`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for SQL Server Admin", and click "Install".

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for SQL Server capabilities to your AI assistant. You can:

* "Create a new Cloud SQL for SQL Server instance named 'e-commerce-prod' in the 'my-gcp-project' project."
* "Create a new user named 'analyst' with read access to all tables."

## Server Capabilities

The Cloud SQL for SQL Server MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `create_instance` | Create an instance (PRIMARY, READ-POOL, or SECONDARY). |
| `create_user` | Create BUILT-IN or IAM-based users for an instance. |
| `get_instance` | Get details about an instance. |
| `get_user` | Get details about a user in an instance. |
| `list_instances` | List instances in a given project and location. |
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |

## Documentation

For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).

docs/CLOUDSQLMSSQL_README.md (Normal file, 90 lines)
@@ -0,0 +1,90 @@

# Cloud SQL for SQL Server MCP Server

The Cloud SQL for SQL Server Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for SQL Server databases. It supports connecting to instances, exploring schemas, and running queries.

## Features

An editor configured to use the Cloud SQL for SQL Server MCP server can use its AI capabilities to help you:

- **Query Data** - Execute SQL queries
- **Explore Schema** - List tables and view schema details

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Client (`roles/cloudsql.client`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for SQL Server", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Cloud SQL Project ID: The GCP project ID.
    * Cloud SQL Region: The region of your Cloud SQL instance.
    * Cloud SQL Instance ID: The ID of your Cloud SQL instance.
    * Cloud SQL Database Name: The name of the database.
    * Cloud SQL Database User: The database username.
    * Cloud SQL Database Password: The password for the database user.
    * Cloud SQL IP Type: (Optional) The IP type, either "Public" or "Private". Defaults to "Public" if unspecified.

> [!NOTE]
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
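
The configuration values above correspond to the prebuilt Cloud SQL for SQL Server tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt cloud-sql-mssql` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt Cloud SQL for SQL Server tools.
export CLOUD_SQL_MSSQL_PROJECT="my-gcp-project"
export CLOUD_SQL_MSSQL_REGION="us-central1"
export CLOUD_SQL_MSSQL_INSTANCE="my-instance"
export CLOUD_SQL_MSSQL_DATABASE="my-database"
export CLOUD_SQL_MSSQL_USER="sqlserver"
export CLOUD_SQL_MSSQL_PASSWORD="my-password"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt cloud-sql-mssql --stdio
```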

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for SQL Server capabilities to your AI assistant. You can:

* "Select top 10 rows from the customers table."
* "List all tables in the database."

## Server Capabilities

The Cloud SQL for SQL Server MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `execute_sql` | Use this tool to execute SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |

## Documentation

For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).

docs/CLOUDSQLMYSQLADMIN_README.md (Normal file, 83 lines)
@@ -0,0 +1,83 @@

# Cloud SQL for MySQL Admin MCP Server

The Cloud SQL for MySQL Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for MySQL databases. It supports connecting to instances, exploring schemas, and running queries.

## Features

An editor configured to use the Cloud SQL for MySQL MCP server can use its AI capabilities to help you:

- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Viewer (`roles/cloudsql.viewer`)
    * Cloud SQL Admin (`roles/cloudsql.admin`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for MySQL Admin", and click "Install".

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for MySQL capabilities to your AI assistant. You can:

* "Create a new Cloud SQL for MySQL instance named 'e-commerce-prod' in the 'my-gcp-project' project."
* "Create a new user named 'analyst' with read access to all tables."

## Server Capabilities

The Cloud SQL for MySQL MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `create_instance` | Create an instance (PRIMARY, READ-POOL, or SECONDARY). |
| `create_user` | Create BUILT-IN or IAM-based users for an instance. |
| `get_instance` | Get details about an instance. |
| `get_user` | Get details about a user in an instance. |
| `list_instances` | List instances in a given project and location. |
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |

## Documentation

For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).

docs/CLOUDSQLMYSQL_README.md (Normal file, 96 lines)
@@ -0,0 +1,96 @@

# Cloud SQL for MySQL MCP Server

The Cloud SQL for MySQL Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for MySQL databases. It supports connecting to instances, exploring schemas, and running queries.

## Features

An editor configured to use the Cloud SQL for MySQL MCP server can use its AI capabilities to help you:

- **Query Data** - Execute SQL queries and analyze query plans
- **Explore Schema** - List tables and view schema details
- **Database Maintenance** - Check for fragmentation and missing indexes
- **Monitor Performance** - View active queries

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Client (`roles/cloudsql.client`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for MySQL", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Cloud SQL Project ID: The GCP project ID.
    * Cloud SQL Region: The region of your Cloud SQL instance.
    * Cloud SQL Instance ID: The ID of your Cloud SQL instance.
    * Cloud SQL Database Name: The name of the database.
    * Cloud SQL Database User: The database username.
    * Cloud SQL Database Password: The password for the database user.
    * Cloud SQL IP Type: (Optional) The IP type, either "Public" or "Private". Defaults to "Public" if unspecified.

> [!NOTE]
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
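
The configuration values above correspond to the prebuilt Cloud SQL for MySQL tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt cloud-sql-mysql` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt Cloud SQL for MySQL tools.
export CLOUD_SQL_MYSQL_PROJECT="my-gcp-project"
export CLOUD_SQL_MYSQL_REGION="us-central1"
export CLOUD_SQL_MYSQL_INSTANCE="my-instance"
export CLOUD_SQL_MYSQL_DATABASE="my-database"
export CLOUD_SQL_MYSQL_USER="mysql-user"
export CLOUD_SQL_MYSQL_PASSWORD="my-password"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt cloud-sql-mysql --stdio
```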

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for MySQL capabilities to your AI assistant. You can:

* "Show me the schema for the 'orders' table."
* "List the top 10 active queries."
* "Check for tables missing unique indexes."

## Server Capabilities

The Cloud SQL for MySQL MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `execute_sql` | Use this tool to execute SQL. |
| `list_active_queries` | Lists top N ongoing queries from processlist and innodb_trx. |
| `get_query_plan` | Provide information about how MySQL executes a SQL statement (EXPLAIN). |
| `list_tables` | Lists detailed schema information for user-created tables. |
| `list_tables_missing_unique_indexes` | Find tables that do not have a primary or unique key constraint. |
| `list_table_fragmentation` | List table fragmentation in MySQL. |

## Documentation

For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).

docs/CLOUDSQLPGADMIN_README.md (Normal file, 83 lines)
@@ -0,0 +1,83 @@

# Cloud SQL for PostgreSQL Admin MCP Server

The Cloud SQL for PostgreSQL Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for PostgreSQL databases. It supports connecting to instances, exploring schemas, running queries, and analyzing performance.

## Features

An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its AI capabilities to help you:

- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Viewer (`roles/cloudsql.viewer`)
    * Cloud SQL Admin (`roles/cloudsql.admin`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL Admin", and click "Install".

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for PostgreSQL capabilities to your AI assistant. You can:

* "Create a new Cloud SQL for Postgres instance named 'e-commerce-prod' in the 'my-gcp-project' project."
* "Create a new user named 'analyst' with read access to all tables."

## Server Capabilities

The Cloud SQL for PostgreSQL MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `create_instance` | Create an instance (PRIMARY, READ-POOL, or SECONDARY). |
| `create_user` | Create BUILT-IN or IAM-based users for an instance. |
| `get_instance` | Get details about an instance. |
| `get_user` | Get details about a user in an instance. |
| `list_instances` | List instances in a given project and location. |
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |

## Documentation

For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).

docs/CLOUDSQLPG_README.md (Normal file, 108 lines)
@@ -0,0 +1,108 @@

# Cloud SQL for PostgreSQL MCP Server

The Cloud SQL for PostgreSQL Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud SQL for PostgreSQL databases. It supports connecting to instances, exploring schemas, running queries, and analyzing performance.

## Features

An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its AI capabilities to help you:

- **Query Data** - Execute SQL queries and analyze query plans
- **Explore Schema** - List tables, views, indexes, and triggers
- **Monitor Performance** - View active queries, bloat, and memory configurations
- **Manage Extensions** - List available and installed extensions

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud SQL Client (`roles/cloudsql.client`)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Cloud SQL Project ID: The GCP project ID.
    * Cloud SQL Region: The region of your Cloud SQL instance.
    * Cloud SQL Instance ID: The ID of your Cloud SQL instance.
    * Cloud SQL Database Name: The name of the database.
    * Cloud SQL Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
    * Cloud SQL Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
    * Cloud SQL IP Type: (Optional) The IP type, either "Public" or "Private". Defaults to "Public" if unspecified.

> [!NOTE]
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
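
The configuration values above correspond to the prebuilt Cloud SQL for PostgreSQL tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt cloud-sql-postgres` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt Cloud SQL for PostgreSQL tools.
export CLOUD_SQL_POSTGRES_PROJECT="my-gcp-project"
export CLOUD_SQL_POSTGRES_REGION="us-central1"
export CLOUD_SQL_POSTGRES_INSTANCE="my-instance"
export CLOUD_SQL_POSTGRES_DATABASE="postgres"
# CLOUD_SQL_POSTGRES_USER / CLOUD_SQL_POSTGRES_PASSWORD may be omitted to use IAM authentication.

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt cloud-sql-postgres --stdio
```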

## Usage

Once configured, the MCP server will automatically provide Cloud SQL for PostgreSQL capabilities to your AI assistant. You can:

* "Show me the top 5 bloated tables."
* "List all installed extensions."
* "Explain the query plan for SELECT * FROM users."

## Server Capabilities

The Cloud SQL for PostgreSQL MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `execute_sql` | Use this tool to execute SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |
| `list_active_queries` | List the top N currently running queries. |
| `list_available_extensions` | Discover all PostgreSQL extensions available for installation. |
| `list_installed_extensions` | List all installed PostgreSQL extensions. |
| `list_autovacuum_configurations` | List PostgreSQL autovacuum-related configurations. |
| `list_memory_configurations` | List PostgreSQL memory-related configurations. |
| `list_top_bloated_tables` | List the top tables by dead-tuple count (an approximate bloat signal). |
| `list_replication_slots` | List key details for all PostgreSQL replication slots. |
| `list_invalid_indexes` | Lists all invalid PostgreSQL indexes. |
| `get_query_plan` | Generate a PostgreSQL EXPLAIN plan in JSON format. |
| `list_views` | Lists views in the database. |
| `list_schemas` | Lists all schemas in the database. |
| `database_overview` | Fetches the current state of the PostgreSQL server. |
| `list_triggers` | Lists all non-internal triggers in a database. |
| `list_indexes` | Lists available user indexes in the database. |
| `list_sequences` | Lists sequences in the database. |

## Documentation

For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).

docs/DATAPLEX_README.md (Normal file, 81 lines)
@@ -0,0 +1,81 @@

# Dataplex MCP Server

The Dataplex Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud Dataplex Catalog. It supports searching and looking up entries and aspect types.

## Features

An editor configured to use the Dataplex MCP server can use its AI capabilities to help you:

- **Search Catalog** - Search for entries in Dataplex Catalog
- **Explore Metadata** - Look up specific entries and search aspect types

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Dataplex API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Dataplex Viewer (`roles/dataplex.viewer`) or equivalent permissions to read catalog entries.

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Dataplex", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Dataplex Project ID: The GCP project ID.
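
This value corresponds to the prebuilt Dataplex tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt dataplex` configuration and the `DATAPLEX_PROJECT` environment variable; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable name for the prebuilt Dataplex tools.
export DATAPLEX_PROJECT="my-gcp-project"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt dataplex --stdio
```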

## Usage

Once configured, the MCP server will automatically provide Dataplex capabilities to your AI assistant. You can:

* "Search for entries related to 'sales' in Dataplex."
* "Look up details for the entry 'projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry'."

## Server Capabilities

The Dataplex MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `search_entries` | Search for entries in Dataplex Catalog. |
| `lookup_entry` | Retrieve a specific entry from Dataplex Catalog. |
| `search_aspect_types` | Find aspect types relevant to the query. |

## Documentation

For more information, visit the [Dataplex documentation](https://cloud.google.com/dataplex/docs).

docs/LOOKER_README.md (Normal file, 101 lines)
@@ -0,0 +1,101 @@

# Looker MCP Server

The Looker Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Looker instance. It supports exploring models, running queries, managing dashboards, and more.

## Features

An editor configured to use the Looker MCP server can use its AI capabilities to help you:

- **Explore Models** - Get models, explores, dimensions, measures, filters, and parameters
- **Run Queries** - Execute Looker queries, generate SQL, and create query URLs
- **Manage Dashboards** - Create, run, and modify dashboards
- **Manage Looks** - Search for and run saved looks
- **Health Checks** - Analyze instance health and performance
- **Developer Tools** - Manage project files and toggle dev mode

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* Access to a Looker instance.
* API Credentials (`Client ID` and `Client Secret`) or OAuth configuration.

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Looker", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Looker Base URL: The URL of your Looker instance.
    * Looker Client ID: The client ID for the Looker API.
    * Looker Client Secret: The client secret for the Looker API.
    * Looker Verify SSL: Whether to verify SSL certificates.
    * Looker Use Client OAuth: Whether to use OAuth for authentication.
    * Looker Show Hidden Models: Whether to show hidden models.
    * Looker Show Hidden Explores: Whether to show hidden explores.
    * Looker Show Hidden Fields: Whether to show hidden fields.
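
These values correspond to the prebuilt Looker tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt looker` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt Looker tools (placeholder values).
export LOOKER_BASE_URL="https://looker.example.com"
export LOOKER_CLIENT_ID="my-client-id"
export LOOKER_CLIENT_SECRET="my-client-secret"
export LOOKER_VERIFY_SSL="true"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt looker --stdio
```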

## Usage

Once configured, the MCP server will automatically provide Looker capabilities to your AI assistant. You can:

* "Find explores in the 'ecommerce' model."
* "Run a query to show total sales by month."
* "Create a new dashboard named 'Sales Overview'."

## Server Capabilities

The Looker MCP server provides a wide range of tools. Here are some of the key capabilities:

| Tool Name | Description |
|:----------|:------------|
| `get_models` | Retrieves the list of LookML models. |
| `get_explores` | Retrieves the list of explores defined in a LookML model. |
| `query` | Run a query against the LookML model. |
| `query_sql` | Generate the SQL that Looker would run. |
| `run_look` | Runs a saved look. |
| `run_dashboard` | Runs all tiles in a dashboard. |
| `make_dashboard` | Creates a new dashboard. |
| `add_dashboard_element` | Adds a tile to a dashboard. |
| `health_pulse` | Checks the status of the Looker instance. |
| `dev_mode` | Toggles development mode. |
| `get_projects` | Lists LookML projects. |

*(See the full list of tools in the extension)*

## Documentation

For more information, visit the [Looker documentation](https://cloud.google.com/looker/docs).

docs/SPANNER_README.md (Normal file, 86 lines)
@@ -0,0 +1,86 @@

# Cloud Spanner MCP Server

The Cloud Spanner Model Context Protocol (MCP) Server gives AI-powered development tools the ability to work with your Google Cloud Spanner databases. It supports executing SQL queries and exploring schemas.

## Features

An editor configured to use the Cloud Spanner MCP server can use its AI capabilities to help you:

- **Query Data** - Execute DML and DQL SQL queries
- **Explore Schema** - List tables and view schema details

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* A Google Cloud project with the **Cloud Spanner API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
    * Cloud Spanner Database User (`roles/spanner.databaseUser`) (for data access)
    * Cloud Spanner Viewer (`roles/spanner.viewer`) (for schema access)

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Spanner", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Spanner Project ID: The GCP project ID.
    * Spanner Instance ID: The Spanner instance ID.
    * Spanner Database ID: The Spanner database ID.
    * Spanner Dialect: (Optional) The database dialect, which can be "googlesql" or "postgresql". Defaults to "googlesql" if unspecified.
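
These values correspond to the prebuilt Spanner tools in MCP Toolbox. Below is a minimal sketch of running the server by hand, assuming the `--prebuilt spanner` configuration and the environment variable names shown; check the Toolbox documentation for the exact names in your version.

```bash
# Assumed environment variable names for the prebuilt Spanner tools.
export SPANNER_PROJECT="my-gcp-project"
export SPANNER_INSTANCE="my-instance"
export SPANNER_DATABASE="my-database"
export SPANNER_DIALECT="googlesql"  # or "postgresql"

# Serve the prebuilt tools over stdio for MCP clients.
toolbox --prebuilt spanner --stdio
```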

## Usage

Once configured, the MCP server will automatically provide Cloud Spanner capabilities to your AI assistant. You can:

* "Execute a DML query to update customer names."
* "List all tables in `my-database`."
* "Execute a DQL query to select data from the `orders` table."

## Server Capabilities

The Cloud Spanner MCP server provides the following tools:

| Tool Name | Description |
|:----------|:------------|
| `execute_sql` | Use this tool to execute DML SQL. |
| `execute_sql_dql` | Use this tool to execute DQL SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |

## Documentation

For more information, visit the [Cloud Spanner documentation](https://cloud.google.com/spanner/docs).

docs/TOOLBOX_README.md (Normal file, 61 lines)
@@ -0,0 +1,61 @@

# MCP Toolbox for Databases Server

The MCP Toolbox for Databases Server gives AI-powered development tools the ability to work with your custom tools. It is designed to simplify and secure the development of tools for interacting with databases.

## Installation and Setup

### Prerequisites

* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
    1. **Download the Toolbox binary**:
       Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
       `https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
        * Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
        * Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).

        <!-- {x-release-please-start-version} -->
        ```
        curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
        ```
        <!-- {x-release-please-end} -->
    2. **Make it executable**:
        ```bash
        chmod +x toolbox
        ```
    3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
        ```bash
        sudo mv toolbox /usr/local/bin/
        # sudo mv toolbox /usr/bin/
        ```

        **On Windows, move binary to the `WindowsApps\` folder**:
        ```
        move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
        ```

        **Tip:** Ensure the destination folder for your binary is included in your system's PATH environment variable. To check `PATH`, use `echo $PATH` (or `echo %PATH%` on Windows).

        **Note:** You may need to restart Antigravity for changes to take effect.

* Any required APIs and permissions for connecting to your database.

### Configuration

1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "MCP Toolbox for Databases", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
    * Add your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/) to the directory where you are running Antigravity.
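
Once the `tools.yaml` is in place, the server can also be started by hand from that directory. A minimal sketch, assuming the `--tools-file` flag (per the Toolbox docs it defaults to `tools.yaml` in the current directory):

```bash
# Run Toolbox against the custom tools configuration in the current directory.
toolbox --tools-file tools.yaml
```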

## Usage

Interact with your custom tools using natural language.

## Documentation

For more information, visit the [MCP Toolbox for Databases documentation](https://googleapis.github.io/genai-toolbox/getting-started/introduction/).

@@ -183,11 +183,11 @@ Protocol (OTLP). If you would like to use a collector, please refer to this

The following flags are used to determine Toolbox's telemetry configuration:

| **flag** | **type** | **description** |
|:---------|:---------|:----------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
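
As an illustration, these flags combine as in the following sketch; the endpoint and service name below are placeholder values:

```bash
# Export telemetry over OTLP to a local collector, under a custom service name.
toolbox --telemetry-otlp "http://127.0.0.1:4318" --telemetry-service-name "my-toolbox"

# Or export directly to Google Cloud Monitoring.
toolbox --telemetry-gcp
```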

In addition to the flags noted above, you can also make additional configuration
for OpenTelemetry via the [General SDK Configuration][sdk-configuration] through

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.19.1\" # x-release-please-version\n",
"version = \"0.21.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",
@@ -96,3 +96,22 @@ all_tools = client.load_toolset()
# This will only load the tools listed in 'my_second_toolset'
my_second_toolset = client.load_toolset("my_second_toolset")
```

### Prompts

The `prompts` section of your `tools.yaml` defines the templates containing
structured messages and instructions for interacting with language models.

```yaml
prompts:
  code_review:
    description: "Asks the LLM to analyze code quality and suggest improvements."
    messages:
      - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
    arguments:
      - name: "code"
        description: "The code to review"
```

For more details on configuring different types of prompts, see the
[Prompts](../resources/prompts/) documentation.

@@ -84,38 +84,46 @@ following instructions for your OS and CPU architecture.
{{< tabpane text=true >}}
{{% tab header="Linux (AMD64)" lang="en" %}}
To install Toolbox as a binary on Linux (AMD64):

```sh
# see releases page for other versions
export VERSION=0.19.1
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```

{{% /tab %}}
{{% tab header="macOS (Apple Silicon)" lang="en" %}}
To install Toolbox as a binary on macOS (Apple Silicon):

```sh
# see releases page for other versions
export VERSION=0.19.1
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```

{{% /tab %}}
{{% tab header="macOS (Intel)" lang="en" %}}
To install Toolbox as a binary on macOS (Intel):

```sh
# see releases page for other versions
export VERSION=0.19.1
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```

{{% /tab %}}
{{% tab header="Windows (AMD64)" lang="en" %}}
To install Toolbox as a binary on Windows (AMD64):

```powershell
# see releases page for other versions
$VERSION = "0.19.1"
Invoke-WebRequest -Uri "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" -OutFile "toolbox.exe"
:: see releases page for other versions
set VERSION=0.21.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```

{{% /tab %}}
{{< /tabpane >}}
{{% /tab %}}

@@ -124,7 +132,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.19.1
export VERSION=0.21.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -143,7 +151,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.19.1
go install github.com/googleapis/genai-toolbox@v0.21.0
```

{{% /tab %}}
@@ -13,7 +13,7 @@ description: >
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Python 3.9+][install-python] (including [pip][install-pip] and
|
||||
1. Installed [Python 3.10+][install-python] (including [pip][install-pip] and
|
||||
your preferred virtual environment tool for managing dependencies e.g.
|
||||
[venv][install-venv]).
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
@@ -25,12 +25,15 @@ This guide assumes you have already done the following:
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
|
||||
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
|
||||
|
||||
## Step 1: Set up your database
|
||||
|
||||
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
@@ -59,7 +59,7 @@ npm install genkit @genkit-ai/googleai
|
||||
npm install llamaindex @llamaindex/google @llamaindex/workflow
|
||||
{{< /tab >}}
|
||||
{{< tab header="GoogleGenAI" lang="bash" >}}
|
||||
npm install @google/genai
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@ go 1.24.4
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
|
||||
google.golang.org/adk v0.0.0-20251105212711-ccd61aa4a1b6
|
||||
google.golang.org/genai v1.34.0
|
||||
google.golang.org/adk v0.1.0
|
||||
google.golang.org/genai v1.35.0
|
||||
)
|
||||
|
||||
require (
|
||||
|
||||
@@ -104,12 +104,12 @@ golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/adk v0.0.0-20251105212711-ccd61aa4a1b6 h1:LiCwUK/a39m3ZJYOBfJX0WAZLaHZjgU0DsJJsekPxLU=
|
||||
google.golang.org/adk v0.0.0-20251105212711-ccd61aa4a1b6/go.mod h1:NvtSLoNx7UzZIiUAI1KoJQLMmt9sG3oCgiCx1TLqKFw=
|
||||
google.golang.org/adk v0.1.0 h1:+w/fHuqRVolotOATlujRA+2DKUuDrFH2poRdEX2QjB8=
|
||||
google.golang.org/adk v0.1.0/go.mod h1:NvtSLoNx7UzZIiUAI1KoJQLMmt9sG3oCgiCx1TLqKFw=
|
||||
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
|
||||
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
|
||||
google.golang.org/genai v1.34.0 h1:lPRJRO+HqRX1SwFo1Xb/22nZ5MBEPUbXDl61OoDxlbY=
|
||||
google.golang.org/genai v1.34.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
|
||||
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
|
||||
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
|
||||
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
|
||||
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
|
||||
|
||||
@@ -4,7 +4,7 @@ go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
|
||||
google.golang.org/genai v1.34.0
|
||||
google.golang.org/genai v1.35.0
|
||||
)
|
||||
|
||||
require (
|
||||
|
||||
@@ -102,8 +102,8 @@ gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
|
||||
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
|
||||
google.golang.org/genai v1.34.0 h1:lPRJRO+HqRX1SwFo1Xb/22nZ5MBEPUbXDl61OoDxlbY=
|
||||
google.golang.org/genai v1.34.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
|
||||
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
|
||||
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
|
||||
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
|
||||
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
|
||||
|
||||
@@ -3,7 +3,7 @@ module genkit-quickstart
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/firebase/genkit/go v1.1.0
|
||||
github.com/firebase/genkit/go v1.2.0
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
|
||||
)
|
||||
|
||||
|
||||
@@ -40,8 +40,8 @@ github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfU
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/firebase/genkit/go v1.1.0 h1:SQqzQt19gEubvUUCFV98TARFAzD30zT3QhseF3oTKqo=
|
||||
github.com/firebase/genkit/go v1.1.0/go.mod h1:ru1cIuxG1s3HeUjhnadVveDJ1yhinj+j+uUh0f0pyxE=
|
||||
github.com/firebase/genkit/go v1.2.0 h1:C31p32vdMZhhSSQQvXouH/kkcleTH4jlgFmpqlJtBS4=
|
||||
github.com/firebase/genkit/go v1.2.0/go.mod h1:ru1cIuxG1s3HeUjhnadVveDJ1yhinj+j+uUh0f0pyxE=
|
||||
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
|
||||
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
|
||||
@@ -4,7 +4,7 @@ go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
|
||||
github.com/openai/openai-go v1.12.0
|
||||
github.com/openai/openai-go/v3 v3.8.1
|
||||
)
|
||||
|
||||
require (
|
||||
@@ -17,7 +17,7 @@ require (
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
|
||||
github.com/tidwall/gjson v1.14.4 // indirect
|
||||
github.com/tidwall/gjson v1.18.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5 // indirect
|
||||
|
||||
@@ -56,8 +56,8 @@ github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81
|
||||
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0 h1:SYJRZzmOyXs9anKp+dfq4rprO92KKnxNyJCCsLkW7nw=
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0/go.mod h1:ivRLILO1B72J2HWCjSStemdhTWccjfW9FnEj4SAM81w=
|
||||
github.com/openai/openai-go v1.12.0 h1:NBQCnXzqOTv5wsgNC36PrFEiskGfO5wccfCWDo9S1U0=
|
||||
github.com/openai/openai-go v1.12.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
|
||||
github.com/openai/openai-go/v3 v3.8.1 h1:b+YWsmwqXnbpSHWQEntZAkKciBZ5CJXwL68j+l59UDg=
|
||||
github.com/openai/openai-go/v3 v3.8.1/go.mod h1:UOpNxkqC9OdNXNUfpNByKOtB4jAL0EssQXq5p8gO0Xs=
|
||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
|
||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
@@ -67,8 +67,8 @@ github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xI
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM=
|
||||
github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
|
||||
@@ -7,29 +7,31 @@ import (
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
openai "github.com/openai/openai-go/v3"
|
||||
)
|
||||
|
||||
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
|
||||
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
|
||||
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolUnionParam {
|
||||
// Get the input schema
|
||||
jsonSchemaBytes, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
return openai.ChatCompletionToolUnionParam{}
|
||||
}
|
||||
|
||||
// Unmarshal the JSON bytes into FunctionParameters
|
||||
var paramsSchema openai.FunctionParameters
|
||||
if err := json.Unmarshal(jsonSchemaBytes, ¶msSchema); err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
return openai.ChatCompletionToolUnionParam{}
|
||||
}
|
||||
|
||||
// Create and return the final tool parameter struct.
|
||||
return openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: openai.String(toolboxTool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
return openai.ChatCompletionToolUnionParam{
|
||||
OfFunction: &openai.ChatCompletionFunctionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: openai.String(toolboxTool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -69,7 +71,7 @@ func main() {
|
||||
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
|
||||
openAITools := make([]openai.ChatCompletionToolUnionParam, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
|
||||
@@ -669,9 +669,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/glob": {
|
||||
"version": "10.4.5",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
|
||||
"integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
|
||||
"version": "10.5.0",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
|
||||
"integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"foreground-child": "^3.1.0",
|
||||
"jackspeak": "^3.1.2",
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
google-adk==1.18.0
|
||||
toolbox-core==0.5.2
|
||||
pytest==8.4.2
|
||||
google-adk==1.19.0
|
||||
toolbox-core==0.5.3
|
||||
pytest==9.0.1
|
||||
@@ -1,3 +1,3 @@
|
||||
google-genai==1.47.0
|
||||
toolbox-core==0.5.2
|
||||
pytest==8.4.2
|
||||
google-genai==1.52.0
|
||||
toolbox-core==0.5.3
|
||||
pytest==9.0.1
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
langchain==0.3.27
|
||||
langchain-google-vertexai==2.1.2
|
||||
langgraph==1.0.1
|
||||
toolbox-langchain==0.5.2
|
||||
pytest==8.4.2
|
||||
langchain==1.0.8
|
||||
langchain-google-vertexai==3.0.3
|
||||
langgraph==1.0.3
|
||||
toolbox-langchain==0.5.3
|
||||
pytest==9.0.1
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
llama-index==0.14.6
|
||||
llama-index-llms-google-genai==0.7.1
|
||||
toolbox-llamaindex==0.5.2
|
||||
pytest==8.4.2
|
||||
llama-index==0.14.8
|
||||
llama-index-llms-google-genai==0.7.3
|
||||
toolbox-llamaindex==0.5.3
|
||||
pytest==9.0.1
|
||||
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -116,7 +116,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
```
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the
|
||||
`--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
<!-- [END configure_toolbox] -->
|
||||
@@ -8,7 +8,7 @@ description: >
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/quickstart/create-and-connect"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/quickstart/create-and-connect"/>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
|
||||
@@ -254,6 +254,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
@@ -278,6 +279,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -287,6 +289,7 @@ Your AI tool is now connected to Cloud SQL for SQL Server using MCP.
|
||||
|
||||
The `cloud-sql-mssql-admin` server provides tools for managing your Cloud SQL
|
||||
instances and interacting with your database:
|
||||
|
||||
* **create_instance**: Creates a new Cloud SQL for SQL Server instance.
|
||||
* **get_instance**: Gets information about a Cloud SQL instance.
|
||||
* **list_instances**: Lists Cloud SQL instances in a project.
|
||||
|
||||
@@ -254,6 +254,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
@@ -278,6 +279,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -287,6 +289,7 @@ Your AI tool is now connected to Cloud SQL for MySQL using MCP.
|
||||
|
||||
The `cloud-sql-mysql-admin` server provides tools for managing your Cloud SQL
|
||||
instances and interacting with your database:
|
||||
|
||||
* **create_instance**: Creates a new Cloud SQL for MySQL instance.
|
||||
* **get_instance**: Gets information about a Cloud SQL instance.
|
||||
* **list_instances**: Lists Cloud SQL instances in a project.
|
||||
|
||||
@@ -254,6 +254,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
@@ -278,6 +279,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -287,6 +289,7 @@ Your AI tool is now connected to Cloud SQL for PostgreSQL using MCP.
|
||||
|
||||
The `cloud-sql-postgres-admin` server provides tools for managing your Cloud SQL
|
||||
instances and interacting with your database:
|
||||
|
||||
* **create_instance**: Creates a new Cloud SQL for PostgreSQL instance.
|
||||
* **get_instance**: Gets information about a Cloud SQL instance.
|
||||
* **list_instances**: Lists Cloud SQL instances in a project.
|
||||
|
||||
@@ -46,21 +46,22 @@ to expose your developer assistant tools to a Looker instance:
|
||||
v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -82,7 +83,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Gemini-CLI" lang="en" %}}
|
||||
|
||||
1. Install [Gemini-CLI](https://github.com/google-gemini/gemini-cli#install-globally-with-npm).
|
||||
1. Install
|
||||
[Gemini-CLI](https://github.com/google-gemini/gemini-cli#install-globally-with-npm).
|
||||
1. Create a directory `.gemini` in your home directory if it doesn't exist.
|
||||
1. Create the file `.gemini/settings.json` if it doesn't exist.
|
||||
1. Add the following configuration, or add the mcpServers stanza if you already
|
||||
@@ -287,7 +289,8 @@ Your AI tool is now connected to Looker using MCP. Try asking your AI
|
||||
assistant to list models, explores, dimensions, and measures. Run a
|
||||
query, retrieve the SQL for a query, and run a saved Look.
|
||||
|
||||
The full tool list is available in the [Prebuilt Tools
Reference](../../reference/prebuilt-tools/#looker).
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
@@ -314,8 +317,10 @@ instance and create new saved content.
|
||||
1. **get_looks**: Return the saved Looks that match a title or description
|
||||
1. **run_look**: Run a saved Look and return the data
|
||||
1. **make_look**: Create a saved Look in Looker and return the URL
|
||||
1. **get_dashboards**: Return the saved dashboards that match a title or
   description
1. **run_dashboard**: Run the queries associated with a dashboard and return the
   data
|
||||
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
|
||||
1. **add_dashboard_element**: Add a tile to a dashboard
|
||||
|
||||
@@ -344,7 +349,8 @@ as well as get the database schema needed to write LookML effectively.
|
||||
1. **get_connection_schemas**: Get the list of schemas for a connection
|
||||
1. **get_connection_databases**: Get the list of databases for a connection
|
||||
1. **get_connection_tables**: Get the list of tables for a connection
|
||||
1. **get_connection_table_columns**: Get the list of columns for a table in a connection
|
||||
1. **get_connection_table_columns**: Get the list of columns for a table in a
|
||||
connection
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -45,19 +45,19 @@ instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -217,6 +217,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
@@ -243,6 +244,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
@@ -270,6 +272,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
@@ -299,6 +302,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -215,6 +215,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
@@ -241,6 +242,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
@@ -268,6 +270,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
@@ -297,6 +300,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -79,10 +79,10 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -108,7 +108,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -129,15 +129,15 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
|
||||
tap the **MCP Servers** icon.
|
||||
tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -156,13 +156,15 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -211,6 +213,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
@@ -236,6 +239,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
@@ -262,6 +266,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
@@ -290,6 +295,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -287,6 +287,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
@@ -313,6 +314,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -195,6 +195,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
@@ -217,6 +218,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
@@ -240,6 +242,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
@@ -265,6 +268,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.19.1/windows/amd64/toolb
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -7,12 +7,20 @@ description: "Connect to Toolbox via Gemini CLI Extensions."
|
||||
|
||||
## Gemini CLI Extensions
|
||||
|
||||
[Gemini CLI][gemini-cli] is an open-source AI agent designed to assist with
development workflows such as coding, debugging, data exploration, and
content creation. Its mission is to provide an agentic interface for interacting
with database and analytics services and popular open-source databases.
|
||||
|
||||
### How extensions work
|
||||
Gemini CLI is highly extensible, allowing for the addition of new tools and
capabilities through extensions. You can load the extensions from a GitHub URL,
a local directory, or a configurable registry. They provide new tools, slash
commands, and prompts to assist with your workflow.

Use the Gemini CLI Extensions to load prebuilt or custom tools to interact with
your databases.
|
||||
|
||||
[gemini-cli]: https://google-gemini.github.io/gemini-cli/
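As a hedged example (the exact subcommand syntax can differ between Gemini CLI releases, so treat this as an assumption and check `gemini extensions --help`), installing one of the Toolbox-powered extensions listed below from its GitHub URL might look like:

```sh
# Assumed syntax: load a database extension directly from its GitHub repository
gemini extensions install https://github.com/gemini-cli-extensions/postgres
```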
|
||||
|
||||
@@ -35,4 +43,4 @@ Below are a list of Gemini CLI Extensions powered by MCP Toolbox:
|
||||
* [mysql](https://github.com/gemini-cli-extensions/mysql)
|
||||
* [postgres](https://github.com/gemini-cli-extensions/postgres)
|
||||
* [spanner](https://github.com/gemini-cli-extensions/spanner)
|
||||
* [sql-server](https://github.com/gemini-cli-extensions/sql-server)
|
||||
|
||||
@@ -169,10 +169,10 @@ testing and debugging Toolbox server.
|
||||
|
||||
### Tested Clients
|
||||
|
||||
| Client             | SSE Works | MCP Config Docs                                                                   |
|--------------------|-----------|-----------------------------------------------------------------------------------|
| Claude Desktop     | ✅        | <https://modelcontextprotocol.io/quickstart/user#1-download-claude-for-desktop>    |
| MCP Inspector      | ✅        | <https://github.com/modelcontextprotocol/inspector>                                |
| Cursor             | ✅        | <https://docs.cursor.com/context/model-context-protocol>                           |
| Windsurf           | ✅        | <https://docs.windsurf.com/windsurf/mcp>                                           |
| VS Code (Insiders) | ✅        | <https://code.visualstudio.com/docs/copilot/chat/mcp-servers>                      |
|
||||
|
||||
@@ -164,7 +164,8 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
|
||||
{{< tab header="Python" lang="python" >}}
|
||||
from toolbox_core import ToolboxClient, auth_methods
|
||||
|
||||
# Replace with the Cloud Run service URL generated in the previous step.
|
||||
# Replace with the Cloud Run service URL generated in the previous step
|
||||
|
||||
URL = "https://cloud-run-url.app"
|
||||
|
||||
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
|
||||
@@ -204,7 +205,6 @@ func main() {
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
Now, you can use this client to connect to the deployed Cloud Run instance!
|
||||
|
||||
## Troubleshooting
|
||||
@@ -215,21 +215,21 @@ for your service in the Google Cloud Console's Cloud Run section. They often
|
||||
contain the specific error message needed to diagnose the problem.
|
||||
{{< /notice >}}
|
||||
|
||||
* **Deployment Fails with "Container failed to start":** This is almost always
|
||||
- **Deployment Fails with "Container failed to start":** This is almost always
|
||||
caused by a port mismatch. Ensure your container's `--port` argument is set to
|
||||
`8080` to match the `$PORT` environment variable provided by Cloud Run.
|
||||
|
||||
* **Client Receives Permission Denied Error (401 or 403):** If your client
|
||||
- **Client Receives Permission Denied Error (401 or 403):** If your client
|
||||
application (e.g., your local SDK) gets a `401 Unauthorized` or `403
|
||||
Forbidden` error when trying to call your Cloud Run service, it means the
|
||||
client is not properly authenticated as an invoker.
|
||||
* Ensure the user or service account calling the service has the **Cloud Run
|
||||
- Ensure the user or service account calling the service has the **Cloud Run
|
||||
Invoker** (`roles/run.invoker`) IAM role.
|
||||
* If running locally, make sure your Application Default Credentials are set
|
||||
- If running locally, make sure your Application Default Credentials are set
|
||||
up correctly by running `gcloud auth application-default login`.
|
||||
|
||||
* **Service Fails to Access Secrets (in logs):** If your application starts but
|
||||
- **Service Fails to Access Secrets (in logs):** If your application starts but
|
||||
the logs show errors like "permission denied" when trying to access Secret
|
||||
Manager, it means the Toolbox service account is missing permissions.
|
||||
* Ensure the `toolbox-identity` service account has the **Secret Manager
|
||||
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
||||
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
||||
|
||||
@@ -69,7 +69,7 @@ response field (e.g. empty string).
|
||||
|
||||
To edit headers, press the "Edit Headers" button to display the header modal.
|
||||
Within this modal, users can make direct edits by typing into the header's text
|
||||
area.
|
||||
|
||||
Toolbox UI validates that the headers are in correct JSON format. Other
|
||||
header-related errors (e.g., incorrect header names or values required by the
|
||||
|
||||
@@ -32,10 +32,12 @@ description: >
|
||||
### Transport Configuration
|
||||
|
||||
**Server Settings:**
|
||||
|
||||
- `--address`, `-a`: Server listening address (default: "127.0.0.1")
|
||||
- `--port`, `-p`: Server listening port (default: 5000)
|
||||
|
||||
**STDIO:**
|
||||
|
||||
- `--stdio`: Run in MCP STDIO mode instead of HTTP server
|
||||
|
||||
#### Usage Examples
|
||||
@@ -50,15 +52,19 @@ description: >
|
||||
The CLI supports multiple mutually exclusive ways to specify tool configurations:
|
||||
|
||||
**Single File:** (default)
|
||||
|
||||
- `--tools-file`: Path to a single YAML configuration file (default: `tools.yaml`)
|
||||
|
||||
**Multiple Files:**
|
||||
|
||||
- `--tools-files`: Comma-separated list of YAML files to merge
|
||||
|
||||
**Directory:**
|
||||
|
||||
- `--tools-folder`: Directory containing YAML files to load and merge
|
||||
|
||||
**Prebuilt Configurations:**
|
||||
|
||||
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
|
||||
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
||||
Reference](prebuilt-tools.md) for allowed values.
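Putting these options together, a brief sketch (file and directory names are placeholders):

```sh
# Merge every YAML file in ./configs and serve over HTTP on a custom port
./toolbox --tools-folder ./configs --address 127.0.0.1 --port 8080

# Run a prebuilt configuration over MCP STDIO instead of HTTP
./toolbox --prebuilt postgres --stdio
```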
|
||||
@@ -79,4 +85,4 @@ Toolbox enables dynamic reloading by default. To disable, use the
|
||||
|
||||
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
|
||||
tools and toolsets with features such as authorized parameters. To learn more,
|
||||
visit [Toolbox UI](../how-to/toolbox-ui/index.md).
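For instance, a minimal invocation that serves a local configuration with the UI enabled and dynamic reloading turned off (the file name is a placeholder):

```sh
./toolbox --tools-file tools.yaml --ui --disable-reload
```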
|
||||
|
||||
@@ -46,6 +46,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_views`: Lists views in the database from pg_views with a default
|
||||
limit of 50 rows. Returns schemaname, viewname and the ownername.
|
||||
* `list_schemas`: Lists schemas in the database.
|
||||
* `database_overview`: Fetches the current state of the PostgreSQL server.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
|
||||
## AlloyDB Postgres Admin
|
||||
|
||||
@@ -216,6 +220,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_views`: Lists views in the database from pg_views with a default
|
||||
limit of 50 rows. Returns schemaname, viewname and the ownername.
|
||||
* `list_schemas`: Lists schemas in the database.
|
||||
* `database_overview`: Fetches the current state of the PostgreSQL server.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
|
||||
## Cloud SQL for PostgreSQL Observability
|
||||
|
||||
@@ -489,8 +497,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
|
||||
* `--prebuilt` value: `postgres`
|
||||
* **Environment Variables:**
|
||||
* `POSTGRES_HOST`: (Optional) The hostname or IP address of the PostgreSQL server.
* `POSTGRES_PORT`: (Optional) The port number for the PostgreSQL server.
|
||||
* `POSTGRES_DATABASE`: The name of the database to connect to.
|
||||
* `POSTGRES_USER`: The database username.
|
||||
* `POSTGRES_PASSWORD`: The password for the database user.
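Putting the variables above together, a hedged sketch of launching the prebuilt PostgreSQL configuration (all values are placeholders):

```sh
export POSTGRES_HOST="127.0.0.1"        # optional
export POSTGRES_PORT="5432"             # optional
export POSTGRES_DATABASE="mydb"
export POSTGRES_USER="myuser"
export POSTGRES_PASSWORD="my-password"

./toolbox --prebuilt postgres
```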
|
||||
@@ -513,6 +521,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_views`: Lists views in the database from pg_views with a default
|
||||
limit of 50 rows. Returns schemaname, viewname and the ownername.
|
||||
* `list_schemas`: Lists schemas in the database.
|
||||
* `database_overview`: Fetches the current state of the PostgreSQL server.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
|
||||
## Google Cloud Serverless for Apache Spark
|
||||
|
||||
|
||||
docs/en/resources/prompts/_index.md (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
---
|
||||
title: "Prompts"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
Prompts allow servers to provide structured messages and instructions for interacting with language models.
|
||||
---
|
||||
|
||||
A `prompt` represents a reusable prompt template that can be retrieved and used
|
||||
by MCP clients.
|
||||
|
||||
A Prompt is essentially a template for a message or a series of messages that
|
||||
can be sent to a Large Language Model (LLM). The Toolbox server implements the
|
||||
`prompts/list` and `prompts/get` methods from the [Model Context Protocol
|
||||
(MCP)](https://modelcontextprotocol.io/docs/getting-started/intro)
|
||||
specification, allowing clients to discover and retrieve these prompts.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
code_review:
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
```
|
||||
|
||||
## Prompt Schema
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||
| description | string | No | A brief explanation of what the prompt does. |
|
||||
| kind | string | No | The kind of prompt. Defaults to `"custom"`. |
|
||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||
|
||||
## Message Schema
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|----------|--------------|--------------------------------------------------------------------------------------------------------|
|
||||
| role | string | No | The role of the sender. Can be `"user"` or `"assistant"`. Defaults to `"user"`. |
|
||||
| content | string | Yes | The text of the message. You can include placeholders for arguments using `{{.argument_name}}` syntax. |
|
||||
|
||||
## Argument Schema
|
||||
|
||||
An argument can be any [Parameter](../tools/_index.md#specifying-parameters)
|
||||
type. If the `type` field is not specified, it will default to `string`.
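For example, a hedged sketch of an argument that overrides the default type (this assumes arguments accept the same `type` field as tool parameters):

```yaml
arguments:
  - name: "max_suggestions"
    type: integer          # assumed; omit to fall back to string
    description: "Maximum number of improvements to suggest"
```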
|
||||
|
||||
## Usage with Gemini CLI
|
||||
|
||||
Prompts defined in your `tools.yaml` can be seamlessly integrated with the
|
||||
Gemini CLI to create [custom slash
|
||||
commands](https://github.com/google-gemini/gemini-cli/blob/main/docs/tools/mcp-server.md#mcp-prompts-as-slash-commands).
|
||||
The workflow is as follows:
|
||||
|
||||
1. **Discovery:** When the Gemini CLI connects to your Toolbox server, it
|
||||
automatically calls `prompts/list` to discover all available prompts.
|
||||
|
||||
2. **Conversion:** Each discovered prompt is converted into a corresponding
|
||||
slash command. For example, a prompt named `code_review` becomes the
|
||||
`/code_review` command in the CLI.
|
||||
|
||||
3. **Execution:** You can execute the command as follows:
|
||||
|
||||
```bash
|
||||
/code_review --code="def hello():\n print('world')"
|
||||
```
|
||||
|
||||
4. **Interpolation:** Once all arguments are collected, the CLI calls `prompts/get`
   with your provided values to retrieve the final, interpolated prompt. For
   example:
|
||||
|
||||
```bash
|
||||
Please review the following code for quality, correctness, and potential improvements: \ndef hello():\n print('world')
|
||||
```
|
||||
|
||||
5. **Response:** This completed prompt is then sent to the Gemini model, and the
|
||||
model's response is displayed back to you in the CLI.
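For reference, discovery happens only after the Gemini CLI is pointed at your Toolbox server. A hedged sketch of an `mcpServers` entry in `.gemini/settings.json` (the server name, path, and arguments are placeholders):

```json
{
  "mcpServers": {
    "toolbox": {
      "command": "./toolbox",
      "args": ["--tools-file", "tools.yaml", "--stdio"]
    }
  }
}
```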
|
||||
|
||||
## Kinds of prompts
|
||||
docs/en/resources/prompts/custom/_index.md (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
---
|
||||
title: "Custom"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Custom prompts defined by the user.
|
||||
---
|
||||
|
||||
Custom prompts are defined by the user to be exposed through their MCP server.
|
||||
They are the default type for prompts.
|
||||
|
||||
## Examples
|
||||
|
||||
### Basic Prompt
|
||||
|
||||
Here is an example of a simple prompt that takes a single argument, code, and
|
||||
asks an LLM to review it.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
code_review:
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
```
|
||||
|
||||
### Multi-message prompt
|
||||
|
||||
You can define prompts with multiple messages to set up more complex
|
||||
conversational contexts, like a role-playing scenario.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
roleplay_scenario:
|
||||
description: "Sets up a roleplaying scenario with initial messages."
|
||||
arguments:
|
||||
- name: "character"
|
||||
description: "The character the AI should embody."
|
||||
- name: "situation"
|
||||
description: "The initial situation for the roleplay."
|
||||
messages:
|
||||
- role: "user"
|
||||
content: "Let's roleplay. You are {{.character}}. The situation is: {{.situation}}"
|
||||
- role: "assistant"
|
||||
content: "Okay, I understand. I am ready. What happens next?"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Prompt Schema
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||
| kind | string | No | The kind of prompt. Must be `"custom"`. |
|
||||
| description | string | No | A brief explanation of what the prompt does. |
|
||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||
|
||||
### Message Schema
|
||||
|
||||
Refer to the default prompt [Message Schema](../_index.md#message-schema).
|
||||
|
||||
### Argument Schema
|
||||
|
||||
Refer to the default prompt [Argument Schema](../_index.md#argument-schema).
|
||||
@@ -1,12 +1,10 @@
|
||||
---
|
||||
title: "AlloyDB Admin"
|
||||
linkTitle: "AlloyDB Admin"
|
||||
title: AlloyDB Admin
|
||||
linkTitle: AlloyDB Admin
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
The "alloydb-admin" source provides a client for the AlloyDB API.
|
||||
aliases:
|
||||
- /resources/sources/alloydb-admin
|
||||
weight: 1
|
||||
description: "The \"alloydb-admin\" source provides a client for the AlloyDB API.\n"
|
||||
aliases: [/resources/sources/alloydb-admin]
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -17,6 +15,7 @@ tools to perform administrative tasks on AlloyDB resources, such as managing
|
||||
clusters, instances, and users.
|
||||
|
||||
Authentication can be handled in two ways:
|
||||
|
||||
1. **Application Default Credentials (ADC):** By default, the source uses ADC
|
||||
to authenticate with the API.
|
||||
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
|
||||
@@ -36,7 +35,9 @@ sources:
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field**      | **type** | **required** | **description**                                                                                                                                  |
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- |
| kind           | string   | true         | Must be "alloydb-admin".                                                                                                                         |
| useClientOAuth | boolean  | false        | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`.  |
| defaultProject | string   | false        | The Google Cloud project ID to use for AlloyDB infrastructure tools.                                                                            |
|
||||
|
||||
@@ -51,6 +51,26 @@ cluster][alloydb-free-trial].
|
||||
- [`postgres-list-schemas`](../tools/postgres/postgres-list-schemas.md)
|
||||
List schemas in an AlloyDB for PostgreSQL database.
|
||||
|
||||
- [`postgres-database-overview`](../tools/postgres/postgres-database-overview.md)
|
||||
Fetches the current state of the PostgreSQL server.
|
||||
|
||||
- [`postgres-list-triggers`](../tools/postgres/postgres-list-triggers.md)
|
||||
List triggers in an AlloyDB for PostgreSQL database.
|
||||
|
||||
- [`postgres-list-indexes`](../tools/postgres/postgres-list-indexes.md)
|
||||
List available user indexes in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
|
||||
List sequences in a PostgreSQL database.
|
||||
- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
|
||||
List long running transactions in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-locks`](../tools/postgres/postgres-list-locks.md)
|
||||
List lock stats in a PostgreSQL database.
|
||||
|
||||
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
|
||||
List replication stats in a PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
|
||||
|
||||
@@ -8,7 +8,10 @@ description: >
|
||||
|
||||
## About
|
||||
|
||||
[Apache Cassandra][cassandra-docs] is a NoSQL distributed database. By design,
NoSQL databases are lightweight, open-source, non-relational, and largely
distributed. Counted among their strengths are horizontal scalability,
distributed architectures, and a flexible approach to schema definition.
|
||||
|
||||
[cassandra-docs]: https://cassandra.apache.org/
|
||||
|
||||
@@ -17,7 +20,6 @@ description: >
|
||||
- [`cassandra-cql`](../tools/cassandra/cassandra-cql.md)
|
||||
Run parameterized CQL queries in Cassandra.
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
@@ -43,15 +45,15 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|------------------------|:---------:|:------------:|-------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cassandra". |
|
||||
| hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. |
|
||||
| keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). |
|
||||
| protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). |
|
||||
| username | string | false | Name of the Cassandra user to connect as (e.g., "my-cassandra-user"). |
|
||||
| password | string | false | Password of the Cassandra user (e.g., "my-password"). |
|
||||
| caPath | string | false | Path to the CA certificate for SSL/TLS (e.g., "/path/to/ca.crt"). |
|
||||
| certPath | string | false | Path to the client certificate for SSL/TLS (e.g., "/path/to/client.crt"). |
|
||||
| keyPath | string | false | Path to the client key for SSL/TLS (e.g., "/path/to/client.key"). |
|
||||
| enableHostVerification | boolean | false | Enable host verification for SSL/TLS (e.g., true). By default, host verification is disabled. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|------------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cassandra". |
|
||||
| hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. |
|
||||
| keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). |
|
||||
| protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). |
|
||||
| username | string | false | Name of the Cassandra user to connect as (e.g., "my-cassandra-user"). |
|
||||
| password | string | false | Password of the Cassandra user (e.g., "my-password"). |
|
||||
| caPath | string | false | Path to the CA certificate for SSL/TLS (e.g., "/path/to/ca.crt"). |
|
||||
| certPath | string | false | Path to the client certificate for SSL/TLS (e.g., "/path/to/client.crt"). |
|
||||
| keyPath | string | false | Path to the client key for SSL/TLS (e.g., "/path/to/client.key"). |
|
||||
| enableHostVerification | boolean | false | Enable host verification for SSL/TLS (e.g., true). By default, host verification is disabled. |
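
A minimal sketch of a `cassandra` source using the fields above (addresses, keyspace, and credentials are placeholders; the TLS fields can be added in the same way):

```yaml
sources:
  my-cassandra-source:
    kind: cassandra
    hosts:
      - 192.168.1.1:9042
    keyspace: my_keyspace
    protoVersion: 4
    username: my-cassandra-user
    password: my-password
```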

@@ -21,7 +21,6 @@ description: >
- [`clickhouse-sql`](../tools/clickhouse/clickhouse-sql.md)
Execute SQL queries as prepared statements in ClickHouse.

## Requirements

### Database User

@@ -23,9 +23,10 @@ A dataset is a container in your Google Cloud project that holds modality-specif
healthcare data. Datasets contain other data stores, such as FHIR stores and DICOM
stores, which in turn hold their own types of healthcare data.

A single dataset can contain one or many data stores, and those stores can all service
the same modality or different modalities as application needs dictate. Using multiple
stores in the same dataset might be appropriate in various situations.
A single dataset can contain one or many data stores, and those stores can all
service the same modality or different modalities as application needs dictate.
Using multiple stores in the same dataset might be appropriate in various
situations.

If you are new to the Cloud Healthcare API, you can try to
[create and view datasets and stores using curl][healthcare-quickstart-curl].
@@ -85,8 +86,9 @@ If you are new to the Cloud Healthcare API, you can try to

### IAM Permissions

The Cloud Healthcare API uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Cloud Healthcare resources like projects, datasets, and stores.
The Cloud Healthcare API uses [Identity and Access Management
(IAM)][iam-overview] to control user and group access to Cloud Healthcare
resources like projects, datasets, and stores.

### Authentication via Application Default Credentials (ADC)

@@ -96,9 +98,9 @@ By **default**, Toolbox will use your [Application Default Credentials

When using this method, you need to ensure the IAM identity associated with your
ADC (such as a service account) has the correct permissions for the queries you
intend to run. Common roles include `roles/healthcare.fhirResourceReader` (which includes
permissions to read and search for FHIR resources) or `roles/healthcare.dicomViewer` (for
retrieving DICOM images).
intend to run. Common roles include `roles/healthcare.fhirResourceReader` (which
includes permissions to read and search for FHIR resources) or
`roles/healthcare.dicomViewer` (for retrieving DICOM images).
Follow this [guide][set-adc] to set up your ADC.

### Authentication via User's OAuth Access Token
@@ -106,8 +108,8 @@ Follow this [guide][set-adc] to set up your ADC.
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
OAuth access token for authentication. This token is parsed from the
`Authorization` header passed in with the tool invocation request. This method
allows Toolbox to make queries to the [Cloud Healthcare API][healthcare-docs] on behalf of the
client or the end-user.
allows Toolbox to make queries to the [Cloud Healthcare API][healthcare-docs] on
behalf of the client or the end-user.

When using this on-behalf-of authentication, you must ensure that the
identity used has been granted the correct IAM permissions.

@@ -15,6 +15,7 @@ Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows
tools to access cloud monitoring metrics explorer and run promql queries.

Authentication can be handled in two ways:

1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will

@@ -1,11 +1,9 @@
---
title: "Cloud SQL Admin"
title: Cloud SQL Admin
type: docs
weight: 1
description: >
A "cloud-sql-admin" source provides a client for the Cloud SQL Admin API.
aliases:
- /resources/sources/cloud-sql-admin
description: "A \"cloud-sql-admin\" source provides a client for the Cloud SQL Admin API.\n"
aliases: [/resources/sources/cloud-sql-admin]
---

## About
@@ -16,6 +14,7 @@ allows tools to perform administrative tasks on Cloud SQL instances, such as
creating users and databases.

Authentication can be handled in two ways:

1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
@@ -37,6 +36,7 @@ sources:
## Reference

| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "cloud-sql-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
| defaultProject | string | false | The Google Cloud project ID to use for Cloud SQL infrastructure tools. |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
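
A minimal sketch of a `cloud-sql-admin` source based on the table above (the source name and project ID are placeholders):

```yaml
sources:
  my-cloud-sql-admin-source:
    kind: cloud-sql-admin
    defaultProject: my-project-id  # optional
    useClientOAuth: false          # optional, defaults to false
```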

@@ -47,13 +47,32 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-list-schemas`](../tools/postgres/postgres-list-schemas.md)
List schemas in a PostgreSQL database.

- [`postgres-database-overview`](../tools/postgres/postgres-database-overview.md)
Fetches the current state of the PostgreSQL server.

- [`postgres-list-triggers`](../tools/postgres/postgres-list-triggers.md)
List triggers in a PostgreSQL database.

- [`postgres-list-indexes`](../tools/postgres/postgres-list-indexes.md)
List available user indexes in a PostgreSQL database.

- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
List sequences in a PostgreSQL database.
- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
List long running transactions in a PostgreSQL database.

- [`postgres-list-locks`](../tools/postgres/postgres-list-locks.md)
List lock stats in a PostgreSQL database.

- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
List replication stats in a PostgreSQL database.

### Pre-built Configurations

- [Cloud SQL for Postgres using
MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
Connect your IDE to Cloud SQL for Postgres using Toolbox.

## Requirements

### IAM Permissions

@@ -30,24 +30,25 @@ sources:
```

{{< notice note >}}
For more details about alternate addresses and custom ports refer to [Managing Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connections.html).
For more details about alternate addresses and custom ports refer to [Managing
Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connections.html).
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "couchbase". |
| connectionString | string | true | Connection string for the Couchbase cluster. |
| bucket | string | true | Name of the bucket to connect to. |
| scope | string | true | Name of the scope within the bucket. |
| username | string | false | Username for authentication. |
| password | string | false | Password for authentication. |
| clientCert | string | false | Path to client certificate file for TLS authentication. |
| clientCertPassword | string | false | Password for the client certificate. |
| clientKey | string | false | Path to client key file for TLS authentication. |
| clientKeyPassword | string | false | Password for the client key. |
| caCert | string | false | Path to CA certificate file. |
| noSslVerify | boolean | false | If true, skip server certificate verification. **Warning:** This option should only be used in development or testing environments. Disabling SSL verification poses significant security risks in production as it makes your connection vulnerable to man-in-the-middle attacks. |
| profile | string | false | Name of the connection profile to apply. |
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "couchbase". |
| connectionString | string | true | Connection string for the Couchbase cluster. |
| bucket | string | true | Name of the bucket to connect to. |
| scope | string | true | Name of the scope within the bucket. |
| username | string | false | Username for authentication. |
| password | string | false | Password for authentication. |
| clientCert | string | false | Path to client certificate file for TLS authentication. |
| clientCertPassword | string | false | Password for the client certificate. |
| clientKey | string | false | Path to client key file for TLS authentication. |
| clientKeyPassword | string | false | Password for the client key. |
| caCert | string | false | Path to CA certificate file. |
| noSslVerify | boolean | false | If true, skip server certificate verification. **Warning:** This option should only be used in development or testing environments. Disabling SSL verification poses significant security risks in production as it makes your connection vulnerable to man-in-the-middle attacks. |
| profile | string | false | Name of the connection profile to apply. |
| queryScanConsistency | integer | false | Query scan consistency. Controls the consistency guarantee for index scanning. Values: 1 for "not_bounded" (fastest option, but results may not include the most recent operations), 2 for "request_plus" (highest consistency level, includes all operations up until the query started, but incurs a performance penalty). If not specified, defaults to the Couchbase Go SDK default. |
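
A minimal sketch of a `couchbase` source using the required fields above (the connection string, bucket, scope, and credentials are placeholders):

```yaml
sources:
  my-couchbase-source:
    kind: couchbase
    connectionString: couchbase://localhost
    bucket: my-bucket
    scope: my-scope
    username: my-couchbase-user
    password: my-password
```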

@@ -321,4 +321,4 @@ Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or`
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex". |
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
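
A minimal sketch of a `dataplex` source (the project ID is a placeholder):

```yaml
sources:
  my-dataplex-source:
    kind: dataplex
    project: my-project-id
```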

@@ -10,22 +10,27 @@ description: >

# Elasticsearch Source

[Elasticsearch][elasticsearch-docs] is a distributed, free and open search and analytics engine
for all types of data, including textual, numerical, geospatial, structured,
and unstructured.
[Elasticsearch][elasticsearch-docs] is a distributed, free and open search and
analytics engine for all types of data, including textual, numerical,
geospatial, structured, and unstructured.

If you are new to Elasticsearch, you can learn how to
If you are new to Elasticsearch, you can learn how to
[set up a cluster and start indexing data][elasticsearch-quickstart].

Elasticsearch uses [ES|QL][elasticsearch-esql] for querying data. ES|QL
is a powerful query language that allows you to search and aggregate data in
Elasticsearch.

See the [official documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html) for more information.
See the [official
documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html)
for more information.

[elasticsearch-docs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
[elasticsearch-quickstart]: https://www.elastic.co/guide/en/elasticsearch/reference/current/getting-started.html
[elasticsearch-esql]: https://www.elastic.co/guide/en/elasticsearch/reference/current/esql.html
[elasticsearch-docs]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
[elasticsearch-quickstart]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/getting-started.html
[elasticsearch-esql]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/esql.html

## Available Tools

@@ -44,9 +49,12 @@ ensure the API key has the correct permissions for the queries you intend to
run. See [API key management][api-key-management] for more information on
applying permissions to an API key.

[api-key]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html
[set-api-key]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html
[api-key-management]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-api-key.html
[api-key]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html
[set-api-key]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html
[api-key-management]:
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-api-key.html

## Example

@@ -61,8 +69,8 @@ sources:

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
| kind | string | true | Must be "elasticsearch". |
| addresses | []string | true | List of Elasticsearch hosts to connect to. |
| apikey | string | true | The API key to use for authentication. |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------|
| kind | string | true | Must be "elasticsearch". |
| addresses | []string | true | List of Elasticsearch hosts to connect to. |
| apikey | string | true | The API key to use for authentication. |
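
A minimal sketch of an `elasticsearch` source using the fields above (the address and API key are placeholders; `${ES_API_KEY}` assumes environment variable replacement):

```yaml
sources:
  my-elasticsearch-source:
    kind: elasticsearch
    addresses:
      - https://localhost:9200
    apikey: ${ES_API_KEY}
```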

@@ -60,4 +60,4 @@ instead of hardcoding your secrets into the configuration file.
| port | string | true | Port to connect to (e.g. "3050") |
| database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |
| user | string | true | Name of the Firebird user to connect as (e.g. "SYSDBA"). |
| password | string | true | Password of the Firebird user (e.g. "masterkey"). |
| password | string | true | Password of the Firebird user (e.g. "masterkey"). |

@@ -48,8 +48,8 @@ permissions):
- `roles/cloudaicompanion.user`
- `roles/geminidataanalytics.dataAgentStatelessUser`

To initialize the application default credential run `gcloud auth login --update-adc`
in your environment before starting MCP Toolbox.
To initialize the application default credential run `gcloud auth login
--update-adc` in your environment before starting MCP Toolbox.

[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc

@@ -81,7 +81,8 @@ The client id and client secret are seemingly random character sequences
assigned by the looker server. If you are using Looker OAuth you don't need
these settings

The `project` and `location` fields are utilized **only** when using the conversational analytics tool.
The `project` and `location` fields are utilized **only** when using the
conversational analytics tool.

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
@@ -91,16 +92,17 @@ instead of hardcoding your secrets into the configuration file.
## Reference

| **field** | **type** | **required** | **description** |
| -------------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
|----------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /). |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header |
| | | | name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
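
A minimal sketch of a `looker` source based on the table above (the URL and credentials are placeholders; with `use_client_oauth` the client id and secret can be dropped):

```yaml
sources:
  my-looker-source:
    kind: looker
    base_url: https://looker.example.com
    client_id: ${LOOKER_CLIENT_ID}
    client_secret: ${LOOKER_CLIENT_SECRET}
    verify_ssl: true
```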
@@ -8,17 +8,32 @@ description: >

## About

[MindsDB][mindsdb-docs] is an AI federated database in the world. It allows you to combine information from hundreds of datasources as if they were SQL, supporting joins across datasources and enabling you to query all unstructured data as if it were structured.
[MindsDB][mindsdb-docs] is an AI federated database in the world. It allows you
to combine information from hundreds of datasources as if they were SQL,
supporting joins across datasources and enabling you to query all unstructured
data as if it were structured.

MindsDB translates MySQL queries into whatever API is needed - whether it's REST APIs, GraphQL, or native database protocols. This means you can write standard SQL queries and MindsDB automatically handles the translation to APIs like Salesforce, Jira, GitHub, email systems, MongoDB, and hundreds of other datasources.
MindsDB translates MySQL queries into whatever API is needed - whether it's REST
APIs, GraphQL, or native database protocols. This means you can write standard
SQL queries and MindsDB automatically handles the translation to APIs like
Salesforce, Jira, GitHub, email systems, MongoDB, and hundreds of other
datasources.

MindsDB also enables you to use ML frameworks to train and use models as virtual tables from the data in those datasources. With MindsDB, the GenAI Toolbox can now expand to hundreds of datasources and leverage all of MindsDB's capabilities on ML and unstructured data.
MindsDB also enables you to use ML frameworks to train and use models as virtual
tables from the data in those datasources. With MindsDB, the GenAI Toolbox can
now expand to hundreds of datasources and leverage all of MindsDB's capabilities
on ML and unstructured data.

**Key Features:**
- **Federated Database**: Connect and query hundreds of datasources through a single SQL interface
- **Cross-Datasource Joins**: Perform joins across different datasources seamlessly
- **API Translation**: Automatically translates MySQL queries into REST APIs, GraphQL, and native protocols
- **Unstructured Data Support**: Query unstructured data as if it were structured

- **Federated Database**: Connect and query hundreds of datasources through a
single SQL interface
- **Cross-Datasource Joins**: Perform joins across different datasources
seamlessly
- **API Translation**: Automatically translates MySQL queries into REST APIs,
GraphQL, and native protocols
- **Unstructured Data Support**: Query unstructured data as if it were
structured
- **ML as Virtual Tables**: Train and use ML models as virtual tables
- **MySQL Wire Protocol**: Compatible with standard MySQL clients and tools

@@ -30,6 +45,7 @@ MindsDB also enables you to use ML frameworks to train and use models as virtual
MindsDB supports hundreds of datasources, including:

### **Business Applications**

- **Salesforce**: Query leads, opportunities, accounts, and custom objects
- **Jira**: Access issues, projects, workflows, and team data
- **GitHub**: Query repositories, commits, pull requests, and issues
@@ -37,22 +53,23 @@ MindsDB supports hundreds of datasources, including:
- **HubSpot**: Query contacts, companies, deals, and marketing data

### **Databases & Storage**

- **MongoDB**: Query NoSQL collections as structured tables
- **Redis**: Key-value stores and caching layers
- **Elasticsearch**: Search and analytics data
- **S3/Google Cloud Storage**: File storage and data lakes

### **Communication & Email**

- **Gmail/Outlook**: Query emails, attachments, and metadata
- **Slack**: Access workspace data and conversations
- **Microsoft Teams**: Team communications and files
- **Discord**: Server data and message history

## Example Queries

### Cross-Datasource Analytics

```sql
-- Join Salesforce opportunities with GitHub activity
SELECT
@@ -67,6 +84,7 @@ GROUP BY s.opportunity_name, s.amount, g.repository_name;
```

### Email & Communication Analysis

```sql
-- Analyze email patterns with Slack activity
SELECT
@@ -81,6 +99,7 @@ GROUP BY e.sender, e.subject, s.channel_name;
```

### ML Model Predictions

```sql
-- Use ML model to predict customer churn
SELECT
@@ -96,9 +115,13 @@ WHERE predicted_churn_probability > 0.8;

### Database User

This source uses standard MySQL authentication since MindsDB implements the MySQL wire protocol. You will need to [create a MindsDB user][mindsdb-users] to login to the database with. If MindsDB is configured without authentication, you can omit the password field.
This source uses standard MySQL authentication since MindsDB implements the
MySQL wire protocol. You will need to [create a MindsDB user][mindsdb-users] to
login to the database with. If MindsDB is configured without authentication, you
can omit the password field.

[mindsdb-users]: https://docs.mindsdb.com/

## Example

```yaml
@@ -136,26 +159,32 @@ instead of hardcoding your secrets into the configuration file.

With MindsDB integration, you can:

- **Query Multiple Datasources**: Connect to databases, APIs, file systems, and more through a single SQL interface
- **Cross-Datasource Analytics**: Perform joins and analytics across different data sources
- **ML Model Integration**: Use trained ML models as virtual tables for predictions and insights
- **Unstructured Data Processing**: Query documents, images, and other unstructured data as structured tables
- **Real-time Predictions**: Get real-time predictions from ML models through SQL queries
- **API Abstraction**: Write SQL queries that automatically translate to REST APIs, GraphQL, and native protocols
- **Query Multiple Datasources**: Connect to databases, APIs, file systems, and
more through a single SQL interface
- **Cross-Datasource Analytics**: Perform joins and analytics across different
data sources
- **ML Model Integration**: Use trained ML models as virtual tables for
predictions and insights
- **Unstructured Data Processing**: Query documents, images, and other
unstructured data as structured tables
- **Real-time Predictions**: Get real-time predictions from ML models through
SQL queries
- **API Abstraction**: Write SQL queries that automatically translate to REST
APIs, GraphQL, and native protocols

## Reference

| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "mindsdb". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3306"). |
| database | string | true | Name of the MindsDB database to connect to (e.g. "my_db"). |
| user | string | true | Name of the MindsDB user to connect as (e.g. "my-mindsdb-user"). |
| **field** | **type** | **required** | **description** |
|--------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mindsdb". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3306"). |
| database | string | true | Name of the MindsDB database to connect to (e.g. "my_db"). |
| user | string | true | Name of the MindsDB user to connect as (e.g. "my-mindsdb-user"). |
| password | string | false | Password of the MindsDB user (e.g. "my-password"). Optional if MindsDB is configured without authentication. |
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
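
A minimal sketch of a `mindsdb` source based on the table above (host, database, and credentials are placeholders; `password` and `queryTimeout` are optional):

```yaml
sources:
  my-mindsdb-source:
    kind: mindsdb
    host: 127.0.0.1
    port: "3306"
    database: my_db
    user: my-mindsdb-user
    password: ${MINDSDB_PASSWORD}
    queryTimeout: 30s
```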

## Resources

- [MindsDB Documentation][mindsdb-docs] - Official documentation and guides
- [MindsDB GitHub][mindsdb-github] - Source code and community
- [MindsDB GitHub][mindsdb-github] - Source code and community

@@ -33,7 +33,8 @@ amount of data through a structured format.
This source only uses standard authentication. You will need to [create a
SQL Server user][mssql-users] to login to the database with.

[mssql-users]: https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/create-a-database-user?view=sql-server-ver16
[mssql-users]:
https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/create-a-database-user?view=sql-server-ver16

## Example

@@ -56,12 +57,12 @@ instead of hardcoding your secrets into the configuration file.

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |
| encrypt | string | false | Encryption level for data transmitted between the client and server (e.g., "strict"). If not specified, defaults to the [github.com/microsoft/go-mssqldb](https://github.com/microsoft/go-mssqldb?tab=readme-ov-file#common-parameters) package's default encrypt value. |
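
A minimal sketch of an `mssql` source based on the table above (host, database, and credentials are placeholders; `encrypt` is optional):

```yaml
sources:
  my-mssql-source:
    kind: mssql
    host: 127.0.0.1
    port: "1433"
    database: my_db
    user: ${MSSQL_USER}
    password: ${MSSQL_PASSWORD}
    encrypt: strict  # optional
```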

@@ -82,4 +82,4 @@ suitable for large-scale applications.
### Strong Consistency

OceanBase provides strong consistency guarantees, ensuring that all transactions
are ACID compliant.
are ACID compliant.

@@ -8,7 +8,10 @@ description: >

## About

[Oracle Database][oracle-docs] is a multi-model database management system produced and marketed by Oracle Corporation. It is commonly used for running online transaction processing (OLTP), data warehousing (DW), and mixed (OLTP & DW) database workloads.
[Oracle Database][oracle-docs] is a multi-model database management system
produced and marketed by Oracle Corporation. It is commonly used for running
online transaction processing (OLTP), data warehousing (DW), and mixed (OLTP &
DW) database workloads.

[oracle-docs]: https://www.oracle.com/database/

@@ -24,33 +27,44 @@ description: >

### Database User

This source uses standard authentication. You will need to [create an Oracle user][oracle-users] to log in to the database with the necessary permissions.
This source uses standard authentication. You will need to [create an Oracle
user][oracle-users] to log in to the database with the necessary permissions.

[oracle-users]:
https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/CREATE-USER.html

## Connection Methods

You can configure the connection to your Oracle database using one of the following three methods. **You should only use one method** in your source configuration.
You can configure the connection to your Oracle database using one of the
following three methods. **You should only use one method** in your source
configuration.

### Basic Connection (Host/Port/Service Name)

This is the most straightforward method, where you provide the connection details as separate fields:
This is the most straightforward method, where you provide the connection
details as separate fields:

- `host`: The IP address or hostname of the database server.
- `port`: The port number the Oracle listener is running on (typically 1521).
- `serviceName`: The service name for the database instance you wish to connect to.
- `serviceName`: The service name for the database instance you wish to connect
to.

### Connection String

As an alternative, you can provide all the connection details in a single `connectionString`. This is a convenient way to consolidate the connection information. The typical format is `hostname:port/servicename`.
As an alternative, you can provide all the connection details in a single
`connectionString`. This is a convenient way to consolidate the connection
information. The typical format is `hostname:port/servicename`.

### TNS Alias

For environments that use a `tnsnames.ora` configuration file, you can connect using a TNS (Transparent Network Substrate) alias.
For environments that use a `tnsnames.ora` configuration file, you can connect
using a TNS (Transparent Network Substrate) alias.

- `tnsAlias`: Specify the alias name defined in your `tnsnames.ora` file.
- `tnsAdmin` (Optional): If your configuration file is not in a standard location, you can use this field to provide the path to the directory containing it. This setting will override the `TNS_ADMIN` environment variable.
- `tnsAdmin` (Optional): If your configuration file is not in a standard
location, you can use this field to provide the path to the directory
containing it. This setting will override the `TNS_ADMIN` environment
variable.

## Example

@@ -41,6 +41,27 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-list-schemas`](../tools/postgres/postgres-list-schemas.md)
List schemas in a PostgreSQL database.

- [`postgres-database-overview`](../tools/postgres/postgres-database-overview.md)
Fetches the current state of the PostgreSQL server.

- [`postgres-list-triggers`](../tools/postgres/postgres-list-triggers.md)
List triggers in a PostgreSQL database.

- [`postgres-list-indexes`](../tools/postgres/postgres-list-indexes.md)
List available user indexes in a PostgreSQL database.

- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
List sequences in a PostgreSQL database.

- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
List long running transactions in a PostgreSQL database.

- [`postgres-list-locks`](../tools/postgres/postgres-list-locks.md)
List lock stats in a PostgreSQL database.

- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
List replication stats in a PostgreSQL database.

### Pre-built Configurations

- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)