mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-01-10 16:08:16 -05:00
Compare commits
116 Commits
v0.6.0
...
invoke-too
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
decff260fb | ||
|
|
dd61496420 | ||
|
|
9cd494b18a | ||
|
|
005a1e624c | ||
|
|
72a7282797 | ||
|
|
29fe3b93cd | ||
|
|
fb3f66acf4 | ||
|
|
1f95eb134b | ||
|
|
4c240ac3c9 | ||
|
|
c6ab74c5da | ||
|
|
04e2529ba9 | ||
|
|
53dd247e6e | ||
|
|
648eede62b | ||
|
|
9b2dfcc553 | ||
|
|
cb514209b6 | ||
|
|
0a93b0482c | ||
|
|
f13e9635ba | ||
|
|
fafed24858 | ||
|
|
6337434623 | ||
|
|
822708afaa | ||
|
|
010c278cbf | ||
|
|
40679d700e | ||
|
|
5fb056ee43 | ||
|
|
a1b60100c2 | ||
|
|
cb92883330 | ||
|
|
bd2f1956bd | ||
|
|
cbb4a33351 | ||
|
|
7badba42ee | ||
|
|
f72e426314 | ||
|
|
7a6644cf0c | ||
|
|
184c681797 | ||
|
|
474df57d62 | ||
|
|
fc1a3813ea | ||
|
|
c7fe3c7f38 | ||
|
|
dc2690bd39 | ||
|
|
b78f7480cf | ||
|
|
ffe9b74211 | ||
|
|
e1355660d4 | ||
|
|
d8e2abe2dd | ||
|
|
7b3539e9ff | ||
|
|
1d658c3b14 | ||
|
|
fd300dc606 | ||
|
|
4827771b78 | ||
|
|
a8df414b11 | ||
|
|
0bf4ebabf1 | ||
|
|
67964d939f | ||
|
|
f77c829271 | ||
|
|
d2977ed1ba | ||
|
|
52e8bf4de1 | ||
|
|
a3aaf93525 | ||
|
|
9197186b8b | ||
|
|
e3844ff76d | ||
|
|
ef6e3f1c32 | ||
|
|
f5f771b0f3 | ||
|
|
12b6636a9b | ||
|
|
d51dbc759b | ||
|
|
4055b0c356 | ||
|
|
65dba4cabc | ||
|
|
447cda2daf | ||
|
|
c54ef61fc6 | ||
|
|
eb98cdc7d1 | ||
|
|
1c067715fa | ||
|
|
cb87f765a6 | ||
|
|
a982314900 | ||
|
|
054ec198b9 | ||
|
|
f0aef29b0c | ||
|
|
075dfa47e1 | ||
|
|
ad62d14cd5 | ||
|
|
5d183b0efe | ||
|
|
904d04bc45 | ||
|
|
75e254c0a4 | ||
|
|
850b32c5b0 | ||
|
|
927ef3c508 | ||
|
|
15d3c45159 | ||
|
|
714d990c34 | ||
|
|
e6c2fb324b | ||
|
|
cf96f4c249 | ||
|
|
8569c6b59f | ||
|
|
44d41a4888 | ||
|
|
4998f82852 | ||
|
|
b81fc6aa6c | ||
|
|
29aa0a70da | ||
|
|
5638ef520a | ||
|
|
2f42de9507 | ||
|
|
0a08d2c15d | ||
|
|
71250e1ced | ||
|
|
702dbc355b | ||
|
|
ef0cbdb4bf | ||
|
|
518a0e4c70 | ||
|
|
d7ba2736eb | ||
|
|
33ae70ec02 | ||
|
|
1c9ad5ea24 | ||
|
|
b76346993f | ||
|
|
1830702fd8 | ||
|
|
b4862825e8 | ||
|
|
f5de1af5bd | ||
|
|
46d7cdf4ba | ||
|
|
9ecf1755ab | ||
|
|
1596f5d772 | ||
|
|
594066f9f4 | ||
|
|
0ddc7240b7 | ||
|
|
0880e16c05 | ||
|
|
a6c49007bf | ||
|
|
ef94648455 | ||
|
|
69d047af46 | ||
|
|
4700dd363c | ||
|
|
386bb23e7c | ||
|
|
ba8a6f3a3b | ||
|
|
ad97578fdf | ||
|
|
3d10f85302 | ||
|
|
5a4cc9af6b | ||
|
|
15f90a3773 | ||
|
|
87380f629d | ||
|
|
953ab9336f | ||
|
|
032b333961 | ||
|
|
25afd63496 |
@@ -17,15 +17,8 @@ steps:
|
||||
waitFor: ['-']
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
docker buildx build --build-arg METADATA_TAGS=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME .
|
||||
|
||||
- id: "push-docker"
|
||||
name: "gcr.io/cloud-builders/docker"
|
||||
waitFor:
|
||||
- "build-docker"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
docker push ${_DOCKER_URI}:$REF_NAME
|
||||
docker buildx create --name container-builder --driver docker-container --bootstrap --use
|
||||
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
|
||||
|
||||
- id: "install-dependencies"
|
||||
name: golang:1
|
||||
@@ -50,7 +43,7 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.linux.amd64.$REF_NAME" -o toolbox.linux.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
|
||||
|
||||
- id: "store-linux-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -72,7 +65,7 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.darwin.arm64.$REF_NAME" -o toolbox.darwin.arm64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
|
||||
|
||||
- id: "store-darwin-arm64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -94,7 +87,7 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.darwin.amd64.$REF_NAME" -o toolbox.darwin.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -116,7 +109,7 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.windows.amd64.$REF_NAME" -o toolbox.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -124,12 +117,13 @@ steps:
|
||||
- "build-windows-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$REF_NAME/windows/amd64/toolbox
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$REF_NAME/windows/amd64/toolbox.exe
|
||||
|
||||
options:
|
||||
automapSubstitutions: true
|
||||
dynamicSubstitutions: true
|
||||
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
|
||||
machineType: 'E2_HIGHCPU_32'
|
||||
|
||||
substitutions:
|
||||
_REGION: us-central1
|
||||
|
||||
@@ -36,7 +36,12 @@ do
|
||||
ARCH=$(echo "$file_key" | cut -d '.' -f 2)
|
||||
|
||||
# Get release URL
|
||||
URL="https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox"
|
||||
if [ "$OS" = 'windows' ];
|
||||
then
|
||||
URL="https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox.exe"
|
||||
else
|
||||
URL="https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox"
|
||||
fi
|
||||
|
||||
curl "$URL" --fail --output toolbox || exit 1
|
||||
|
||||
|
||||
@@ -33,7 +33,9 @@ steps:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
script: |
|
||||
go test -c -race ./tests/...
|
||||
go test -c -race -cover \
|
||||
-coverpkg=./internal/sources/...,./internal/tools/... ./tests/...
|
||||
chmod +x .ci/test_with_coverage.sh
|
||||
|
||||
- id: "cloud-sql-pg"
|
||||
name: golang:1
|
||||
@@ -54,7 +56,12 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./cloudsqlpg.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL Postgres" \
|
||||
cloudsqlpg \
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
|
||||
- id: "alloydb-pg"
|
||||
name: golang:1
|
||||
@@ -75,7 +82,11 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./alloydbpg.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"AlloyDB Postgres" \
|
||||
alloydbpg \
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
- id: "alloydb-ai-nl"
|
||||
name: golang:1
|
||||
@@ -96,7 +107,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./alloydbainl.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"AlloyDB AI NL" \
|
||||
alloydbainl \
|
||||
alloydbainl
|
||||
|
||||
- id: "bigtable"
|
||||
name: golang:1
|
||||
@@ -115,7 +129,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./bigtable.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Bigtable" \
|
||||
bigtable \
|
||||
bigtable
|
||||
|
||||
- id: "bigquery"
|
||||
name: golang:1
|
||||
@@ -132,7 +149,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./bigquery.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"BigQuery" \
|
||||
bigquery \
|
||||
bigquery
|
||||
|
||||
- id: "postgres"
|
||||
name: golang:1
|
||||
@@ -151,7 +171,11 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./postgres.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Postgres" \
|
||||
postgres \
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
- id: "spanner"
|
||||
name: golang:1
|
||||
@@ -170,7 +194,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./spanner.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Spanner" \
|
||||
spanner \
|
||||
spanner
|
||||
|
||||
- id: "neo4j"
|
||||
name: golang:1
|
||||
@@ -187,7 +214,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./neo4j.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Neo4j" \
|
||||
neo4j \
|
||||
neo4j
|
||||
|
||||
- id: "cloud-sql-mssql"
|
||||
name: golang:1
|
||||
@@ -208,7 +238,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./cloudsqlmssql.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL MSSQL" \
|
||||
cloudsqlmssql \
|
||||
mssql
|
||||
|
||||
- id: "cloud-sql-mysql"
|
||||
name: golang:1
|
||||
@@ -229,7 +262,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./cloudsqlmysql.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL MySQL" \
|
||||
cloudsqlmysql \
|
||||
mysql
|
||||
|
||||
- id: "mysql"
|
||||
name: golang:1
|
||||
@@ -248,7 +284,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./mysql.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"MySQL" \
|
||||
mysql \
|
||||
mysql
|
||||
|
||||
- id: "mssql"
|
||||
name: golang:1
|
||||
@@ -267,7 +306,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./mssql.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"MSSQL" \
|
||||
mssql \
|
||||
mssql
|
||||
|
||||
- id: "dgraph"
|
||||
name: golang:1
|
||||
@@ -282,7 +324,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./dgraph.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Dgraph" \
|
||||
dgraph \
|
||||
dgraph
|
||||
|
||||
- id: "http"
|
||||
name: golang:1
|
||||
@@ -297,7 +342,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./http.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"HTTP" \
|
||||
http \
|
||||
http
|
||||
|
||||
- id: "sqlite"
|
||||
name: golang:1
|
||||
@@ -313,7 +361,10 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./sqlite.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"SQLite" \
|
||||
sqlite \
|
||||
sqlite
|
||||
|
||||
- id: "couchbase"
|
||||
name : golang:1
|
||||
@@ -331,7 +382,49 @@ steps:
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./couchbase.test -test.v
|
||||
.ci/test_with_coverage.sh \
|
||||
"Couchbase" \
|
||||
couchbase \
|
||||
couchbase
|
||||
|
||||
- id: "redis"
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["REDIS_ADDRESS", "REDIS_PASS", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Redis" \
|
||||
redis \
|
||||
redis
|
||||
|
||||
- id: "valkey"
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "VALKEY_DATABASE=$_VALKEY_DATABASE"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["VALKEY_ADDRESS", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Valkey" \
|
||||
valkey \
|
||||
valkey
|
||||
|
||||
|
||||
availableSecrets:
|
||||
@@ -371,7 +464,7 @@ availableSecrets:
|
||||
- versionName: projects/$PROJECT_ID/secrets/mysql_pass/versions/latest
|
||||
env: MYSQL_PASS
|
||||
- versionName: projects/$PROJECT_ID/secrets/mssql_user/versions/latest
|
||||
env: MSSQL_USER
|
||||
env: MSSQL_USER
|
||||
- versionName: projects/$PROJECT_ID/secrets/mssql_pass/versions/latest
|
||||
env: MSSQL_PASS
|
||||
- versionName: projects/$PROJECT_ID/secrets/couchbase_connection/versions/latest
|
||||
@@ -380,6 +473,12 @@ availableSecrets:
|
||||
env: COUCHBASE_USER
|
||||
- versionName: projects/$PROJECT_ID/secrets/couchbase_pass/versions/latest
|
||||
env: COUCHBASE_PASS
|
||||
- versionName: projects/$PROJECT_ID/secrets/memorystore_redis_address/versions/latest
|
||||
env: REDIS_ADDRESS
|
||||
- versionName: projects/$PROJECT_ID/secrets/memorystore_redis_pass/versions/latest
|
||||
env: REDIS_PASS
|
||||
- versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
|
||||
env: VALKEY_ADDRESS
|
||||
|
||||
|
||||
options:
|
||||
@@ -411,4 +510,4 @@ substitutions:
|
||||
_MSSQL_PORT: "1433"
|
||||
_DGRAPHURL: "https://play.dgraph.io"
|
||||
_COUCHBASE_BUCKET: "couchbase-bucket"
|
||||
_COUCHBASE_SCOPE: "couchbase-scope"
|
||||
_COUCHBASE_SCOPE: "couchbase-scope"
|
||||
60
.ci/test_with_coverage.sh
Executable file
60
.ci/test_with_coverage.sh
Executable file
@@ -0,0 +1,60 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Arguments:
|
||||
# $1: Display name for logs (e.g., "Cloud SQL Postgres")
|
||||
# $2: Source package name (e.g., cloudsqlpg)
|
||||
# $3, $4, ...: Tool package names for grep (e.g., postgressql)
|
||||
|
||||
DISPLAY_NAME="$1"
|
||||
SOURCE_PACKAGE_NAME="$2"
|
||||
|
||||
# Construct the test binary name
|
||||
TEST_BINARY="${SOURCE_PACKAGE_NAME}.test"
|
||||
|
||||
# Construct the full source path
|
||||
SOURCE_PATH="sources/${SOURCE_PACKAGE_NAME}/"
|
||||
|
||||
# Shift arguments so that $3 and onwards become the list of tool package names
|
||||
shift 2
|
||||
TOOL_PACKAGE_NAMES=("$@")
|
||||
|
||||
COVERAGE_FILE="${TEST_BINARY%.test}_coverage.out"
|
||||
FILTERED_COVERAGE_FILE="${TEST_BINARY%.test}_filtered_coverage.out"
|
||||
|
||||
export path="github.com/googleapis/genai-toolbox/internal/"
|
||||
|
||||
GREP_PATTERN="^mode:|${path}${SOURCE_PATH}"
|
||||
# Add each tool package path to the grep pattern
|
||||
for tool_name in "${TOOL_PACKAGE_NAMES[@]}"; do
|
||||
if [ -n "$tool_name" ]; then
|
||||
full_tool_path="tools/${tool_name}/"
|
||||
GREP_PATTERN="${GREP_PATTERN}|${path}${full_tool_path}"
|
||||
fi
|
||||
done
|
||||
|
||||
# Run integration test
|
||||
if ! ./"${TEST_BINARY}" -test.v -test.coverprofile="${COVERAGE_FILE}"; then
|
||||
echo "Error: Tests for ${DISPLAY_NAME} failed. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Filter source/tool packages
|
||||
if ! grep -E "${GREP_PATTERN}" "${COVERAGE_FILE}" > "${FILTERED_COVERAGE_FILE}"; then
|
||||
echo "Warning: Could not filter coverage for ${DISPLAY_NAME}. Filtered file might be empty or invalid."
|
||||
fi
|
||||
|
||||
# Calculate coverage
|
||||
echo "Calculating coverage for ${DISPLAY_NAME}..."
|
||||
total_coverage=$(go tool cover -func="${FILTERED_COVERAGE_FILE}" 2>/dev/null | grep "total:" | awk '{print $3}')
|
||||
|
||||
|
||||
echo "${DISPLAY_NAME} total coverage: $total_coverage"
|
||||
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
|
||||
|
||||
# Check coverage threshold
|
||||
if awk -v coverage="$coverage_numeric" 'BEGIN {exit !(coverage < 50)}'; then
|
||||
echo "Coverage failure: ${DISPLAY_NAME} total coverage($total_coverage) is below 50%."
|
||||
exit 1
|
||||
else
|
||||
echo "Coverage for ${DISPLAY_NAME} is sufficient."
|
||||
fi
|
||||
@@ -18,19 +18,13 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
docker buildx build --build-arg METADATA_TAGS=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$VERSION -t ${_DOCKER_URI}:latest .
|
||||
docker buildx create --name container-builder --driver docker-container --bootstrap --use
|
||||
|
||||
- id: "push-docker"
|
||||
name: "gcr.io/cloud-builders/docker"
|
||||
waitFor:
|
||||
- "build-docker"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
docker push ${_DOCKER_URI}:$VERSION
|
||||
export TAGS="-t ${_DOCKER_URI}:$VERSION"
|
||||
if [[ $_PUSH_LATEST == 'true' ]]; then
|
||||
docker push ${_DOCKER_URI}:latest
|
||||
export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
|
||||
fi
|
||||
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
|
||||
|
||||
- id: "install-dependencies"
|
||||
name: golang:1
|
||||
@@ -56,7 +50,7 @@ steps:
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.linux.amd64.$VERSION.$(git rev-parse HEAD)" -o toolbox.linux.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
|
||||
|
||||
- id: "store-linux-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -80,7 +74,7 @@ steps:
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.darwin.arm64.$VERSION.$(git rev-parse HEAD)" -o toolbox.darwin.arm64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
|
||||
|
||||
- id: "store-darwin-arm64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -104,7 +98,7 @@ steps:
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.darwin.amd64.$VERSION.$(git rev-parse HEAD)" -o toolbox.darwin.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -128,7 +122,7 @@ steps:
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=binary.windows.amd64.$VERSION.$(git rev-parse HEAD)" -o toolbox.windows.amd64
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
@@ -137,12 +131,13 @@ steps:
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
|
||||
|
||||
options:
|
||||
automapSubstitutions: true
|
||||
dynamicSubstitutions: true
|
||||
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
|
||||
machineType: 'E2_HIGHCPU_32'
|
||||
|
||||
substitutions:
|
||||
_REGION: us-central1
|
||||
|
||||
9
.github/blunderbuss.yml
vendored
9
.github/blunderbuss.yml
vendored
@@ -2,7 +2,14 @@ assign_issues:
|
||||
- kurtisvg
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
assign_issues_by:
|
||||
- labels:
|
||||
- 'product: bigquery'
|
||||
to:
|
||||
- Genesis929
|
||||
- shobsi
|
||||
- jiaxunwu
|
||||
assign_prs:
|
||||
- kurtisvg
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
- duwenxin99
|
||||
|
||||
10
.github/labels.yaml
vendored
10
.github/labels.yaml
vendored
@@ -69,6 +69,10 @@
|
||||
color: 3DED97
|
||||
description: Label to trigger Github Action tests.
|
||||
|
||||
- name: 'docs: deploy-preview'
|
||||
color: BFDADC
|
||||
description: Label to trigger Github Action docs preview.
|
||||
|
||||
- name: 'status: contribution welcome'
|
||||
color: 8befd7
|
||||
description: Status - Contributions welcome.
|
||||
@@ -80,3 +84,9 @@
|
||||
- name: 'status: awaiting codeowners'
|
||||
color: 8befd7
|
||||
description: Status - Awaiting response from code owners.
|
||||
|
||||
# Product Labels
|
||||
- name: 'product: bigquery'
|
||||
color: 5065c7
|
||||
description: Product - Assigned to the BigQuery team.
|
||||
|
||||
|
||||
10
.github/release-please.yml
vendored
10
.github/release-please.yml
vendored
@@ -21,9 +21,15 @@ extraFiles: [
|
||||
"docs/en/getting-started/introduction/_index.md",
|
||||
"docs/en/getting-started/local_quickstart.md",
|
||||
"docs/en/getting-started/mcp_quickstart/_index.md",
|
||||
"docs/en/how-to/deploy_gke.md",
|
||||
"docs/en/samples/bigquery/local_quickstart.md",
|
||||
"docs/en/samples/bigquery/mcp_quickstart.md",
|
||||
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
|
||||
"docs/en/getting-started/colab_quickstart.ipynb",
|
||||
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
|
||||
"docs/en/how-to/connect-ide/bigquery_mcp.md",
|
||||
"docs/en/how-to/connect-ide/spanner_mcp.md",
|
||||
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
|
||||
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
|
||||
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
|
||||
"docs/en/how-to/connect-ide/postgres_mcp.md",
|
||||
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
|
||||
]
|
||||
|
||||
2
.github/sync-repo-settings.yaml
vendored
2
.github/sync-repo-settings.yaml
vendored
@@ -31,6 +31,8 @@ branchProtectionRules:
|
||||
- "header-check"
|
||||
# - Add required status checks like presubmit tests
|
||||
- "unit tests (ubuntu-latest)"
|
||||
- "unit tests (windows-latest)"
|
||||
- "unit tests (macos-latest)"
|
||||
- "integration-test-pr (toolbox-testing-438616)"
|
||||
requiredApprovingReviewCount: 1
|
||||
requiresCodeOwnerReviews: true
|
||||
|
||||
2
.github/workflows/docs_deploy.yaml
vendored
2
.github/workflows/docs_deploy.yaml
vendored
@@ -31,7 +31,7 @@ on:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
working-directory: .hugo
|
||||
|
||||
17
.github/workflows/docs_preview_deploy.yaml
vendored
17
.github/workflows/docs_preview_deploy.yaml
vendored
@@ -17,7 +17,7 @@ name: "docs"
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
|
||||
# This Workflow depends on 'github.event.number',
|
||||
# not compatible with branch or manual triggers.
|
||||
on:
|
||||
@@ -27,9 +27,19 @@ on:
|
||||
- 'docs/**'
|
||||
- 'github/workflows/docs**'
|
||||
- '.hugo/**'
|
||||
pull_request_target:
|
||||
types: [labeled]
|
||||
paths:
|
||||
- 'docs/**'
|
||||
- 'github/workflows/docs**'
|
||||
- '.hugo/**'
|
||||
|
||||
jobs:
|
||||
preview:
|
||||
# run job on proper workflow event triggers (skip job for pull_request event
|
||||
# from forks and only run pull_request_target for "docs: deploy-preview"
|
||||
# label)
|
||||
if: "${{ (github.event.action != 'labeled' && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) || github.event.label.name == 'docs: deploy-preview' }}"
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
@@ -41,6 +51,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
|
||||
|
||||
- name: Setup Hugo
|
||||
@@ -70,8 +81,6 @@ jobs:
|
||||
HUGO_RELATIVEURLS: false
|
||||
|
||||
- name: Deploy
|
||||
# If run from a fork, GitHub write operations will fail.
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -80,8 +89,6 @@ jobs:
|
||||
commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"
|
||||
|
||||
- name: Comment
|
||||
# If run from a fork, GitHub write operations will fail.
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
with:
|
||||
script: |
|
||||
|
||||
2
.github/workflows/lint.yaml
vendored
2
.github/workflows/lint.yaml
vendored
@@ -66,7 +66,7 @@ jobs:
|
||||
run: |
|
||||
go mod tidy && git diff --exit-code
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@55c2c1448f86e01eaae002a5a3a9624417608d84 # v6.5.2
|
||||
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
|
||||
with:
|
||||
version: latest
|
||||
args: --timeout 3m
|
||||
|
||||
12
.github/workflows/tests.yaml
vendored
12
.github/workflows/tests.yaml
vendored
@@ -32,8 +32,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
# os: [macos-latest, windows-latest, ubuntu-latest]
|
||||
os: [ubuntu-latest]
|
||||
os: [macos-latest, windows-latest, ubuntu-latest]
|
||||
fail-fast: false
|
||||
permissions:
|
||||
contents: 'read'
|
||||
@@ -75,7 +74,8 @@ jobs:
|
||||
- name: Build
|
||||
run: go build -v ./...
|
||||
|
||||
- name: Run tests
|
||||
- name: Run tests with coverage
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
run: |
|
||||
source_dir="./internal/sources/*"
|
||||
tool_dir="./internal/tools/*"
|
||||
@@ -85,7 +85,13 @@ jobs:
|
||||
go test -race -cover -coverprofile=coverage.out -v $included_packages
|
||||
go test -race -v ./internal/sources/... ./internal/tools/... ./internal/auth/...
|
||||
|
||||
- name: Run tests without coverage
|
||||
if: ${{ runner.os != 'Linux' }}
|
||||
run: |
|
||||
go test -race -v ./internal/... ./cmd/...
|
||||
|
||||
- name: Check coverage
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
run: |
|
||||
FILE_TO_EXCLUDE="github.com/googleapis/genai-toolbox/internal/server/config.go"
|
||||
ESCAPED_PATH=$(echo "$FILE_TO_EXCLUDE" | sed 's/\//\\\//g; s/\./\\\./g')
|
||||
|
||||
@@ -12,21 +12,26 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
version: "2"
|
||||
linters:
|
||||
enable:
|
||||
- errcheck
|
||||
- goimports
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- staticcheck
|
||||
- unused
|
||||
linters-settings:
|
||||
gofmt:
|
||||
rewrite-rules:
|
||||
- pattern: 'interface{}'
|
||||
replacement: 'any'
|
||||
- pattern: 'a[b:len(a)]'
|
||||
replacement: 'a[b:]'
|
||||
exclusions:
|
||||
presets:
|
||||
- std-error-handling
|
||||
issues:
|
||||
fix: true
|
||||
formatters:
|
||||
enable:
|
||||
- goimports
|
||||
settings:
|
||||
gofmt:
|
||||
rewrite-rules:
|
||||
- pattern: interface{}
|
||||
replacement: any
|
||||
- pattern: a[b:len(a)]
|
||||
replacement: a[b:]
|
||||
|
||||
1
.hugo/assets/scss/_styles_project.scss
Normal file
1
.hugo/assets/scss/_styles_project.scss
Normal file
@@ -0,0 +1 @@
|
||||
@import 'td/code-dark';
|
||||
@@ -17,7 +17,7 @@ enableRobotsTXT = true
|
||||
proxy = "direct"
|
||||
[module.hugoVersion]
|
||||
extended = true
|
||||
min = "0.73.0"
|
||||
min = "0.146.0"
|
||||
[[module.mounts]]
|
||||
source = "../docs/en"
|
||||
target = 'content'
|
||||
@@ -28,6 +28,7 @@ enableRobotsTXT = true
|
||||
path = "github.com/martignoni/hugo-notice"
|
||||
|
||||
[params]
|
||||
description = "MCP Toolbox for Databases is an open source MCP server for databases. It enables you to develop tools easier, faster, and more securely by handling the complexities such as connection pooling, authentication, and more."
|
||||
copyright = "Google LLC"
|
||||
github_repo = "https://github.com/googleapis/genai-toolbox"
|
||||
github_project_repo = "https://github.com/googleapis/genai-toolbox"
|
||||
@@ -47,4 +48,23 @@ enableRobotsTXT = true
|
||||
pre = "<i class='fa-brands fa-github'></i>"
|
||||
|
||||
[markup.goldmark.renderer]
|
||||
unsafe= true
|
||||
unsafe= true
|
||||
|
||||
[markup.highlight]
|
||||
noClasses = false
|
||||
style = "tango"
|
||||
|
||||
[outputFormats]
|
||||
[outputFormats.LLMS]
|
||||
mediaType = "text/plain"
|
||||
baseName = "llms"
|
||||
isPlainText = true
|
||||
root = true
|
||||
[outputFormats.LLMS-FULL]
|
||||
mediaType = "text/plain"
|
||||
baseName = "llms-full"
|
||||
isPlainText = true
|
||||
root = true
|
||||
|
||||
[outputs]
|
||||
home = ["HTML", "RSS", "LLMS", "LLMS-FULL"]
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
{{ template "_default/_markup/td-render-heading.html" . }}
|
||||
14
.hugo/layouts/index.llms-full.txt
Normal file
14
.hugo/layouts/index.llms-full.txt
Normal file
@@ -0,0 +1,14 @@
|
||||
{{ .Site.Params.description }}
|
||||
|
||||
{{ range .Site.Sections }}
|
||||
# {{ .Title }}
|
||||
{{ .Description }}
|
||||
{{ range .Pages }}
|
||||
# {{ .Title }}
|
||||
{{ .Description }}
|
||||
{{ .RawContent }}
|
||||
{{ range .Pages }}
|
||||
# {{ .Title }}
|
||||
{{ .Description }}
|
||||
{{ .RawContent }}
|
||||
{{end }}{{ end }}{{ end }}
|
||||
9
.hugo/layouts/index.llms.txt
Normal file
9
.hugo/layouts/index.llms.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
# {{ .Site.Title }}
|
||||
|
||||
> {{ .Site.Params.description }}
|
||||
|
||||
## Docs
|
||||
{{ range .Site.Sections }}
|
||||
### {{ .Title }}
|
||||
|
||||
{{ .Description }}{{ range .Pages }}- [{{ .Title }}]({{ .Permalink }}): {{ .Description }}{{ range .Pages }} - [{{ .Title }}]({{ .Permalink }}): {{ .Description }}{{end }}{{ end }}{{ end }}
|
||||
1
.hugo/layouts/partials/td/render-heading.html
Normal file
1
.hugo/layouts/partials/td/render-heading.html
Normal file
@@ -0,0 +1 @@
|
||||
{{ template "partials/td/render-heading.html" . }}
|
||||
50
CHANGELOG.md
50
CHANGELOG.md
@@ -1,5 +1,55 @@
|
||||
# Changelog
|
||||
|
||||
## [0.8.0](https://github.com/googleapis/genai-toolbox/compare/v0.7.0...v0.8.0) (2025-07-02)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **postgres,mssql,cloudsqlmssql:** encode source connection url for sources ([#727](https://github.com/googleapis/genai-toolbox/issues/727))
|
||||
|
||||
### Features
|
||||
|
||||
* Add support for multiple YAML configuration files ([#760](https://github.com/googleapis/genai-toolbox/issues/760)) ([40679d7](https://github.com/googleapis/genai-toolbox/commit/40679d700eded50d19569923e2a71c51e907a8bf))
|
||||
* Add support for optional parameters ([#617](https://github.com/googleapis/genai-toolbox/issues/617)) ([4827771](https://github.com/googleapis/genai-toolbox/commit/4827771b78dee9a1284a898b749509b472061527)), closes [#475](https://github.com/googleapis/genai-toolbox/issues/475)
|
||||
* **mcp:** Support MCP version 2025-03-26 ([#755](https://github.com/googleapis/genai-toolbox/issues/755)) ([474df57](https://github.com/googleapis/genai-toolbox/commit/474df57d62de683079f8d12c31db53396a545fd1))
|
||||
* **sources/http:** Support disable SSL verification for HTTP Source ([#674](https://github.com/googleapis/genai-toolbox/issues/674)) ([4055b0c](https://github.com/googleapis/genai-toolbox/commit/4055b0c3569c527560d7ad34262963b3dd4e282d))
|
||||
* **tools/bigquery:** Add templateParameters field for bigquery ([#699](https://github.com/googleapis/genai-toolbox/issues/699)) ([f5f771b](https://github.com/googleapis/genai-toolbox/commit/f5f771b0f3d159630ff602ff55c6c66b61981446))
|
||||
* **tools/bigtable:** Add templateParameters field for bigtable ([#692](https://github.com/googleapis/genai-toolbox/issues/692)) ([1c06771](https://github.com/googleapis/genai-toolbox/commit/1c067715fac06479eb0060d7067b73dba099ed92))
|
||||
* **tools/couchbase:** Add templateParameters field for couchbase ([#723](https://github.com/googleapis/genai-toolbox/issues/723)) ([9197186](https://github.com/googleapis/genai-toolbox/commit/9197186b8bea1ac4ec1b39c9c5c110807c8b2ba9))
|
||||
* **tools/http:** Add support for HTTP Tool pathParams ([#726](https://github.com/googleapis/genai-toolbox/issues/726)) ([fd300dc](https://github.com/googleapis/genai-toolbox/commit/fd300dc606d88bf9f7bba689e2cee4e3565537dd))
|
||||
* **tools/redis:** Add Redis Source and Tool ([#519](https://github.com/googleapis/genai-toolbox/issues/519)) ([f0aef29](https://github.com/googleapis/genai-toolbox/commit/f0aef29b0c2563e2a00277fbe2784f39f16d2835))
|
||||
* **tools/spanner:** Add templateParameters field for spanner ([#691](https://github.com/googleapis/genai-toolbox/issues/691)) ([075dfa4](https://github.com/googleapis/genai-toolbox/commit/075dfa47e1fd92be4847bd0aec63296146b66455))
|
||||
* **tools/sqlitesql:** Add templateParameters field for sqlitesql ([#687](https://github.com/googleapis/genai-toolbox/issues/687)) ([75e254c](https://github.com/googleapis/genai-toolbox/commit/75e254c0a4ce690ca5fa4d1741550ce54734b226))
|
||||
* **tools/valkey:** Add Valkey Source and Tool ([#532](https://github.com/googleapis/genai-toolbox/issues/532)) ([054ec19](https://github.com/googleapis/genai-toolbox/commit/054ec198b97ba9f36f67dd12b2eff0cc6bc4d080))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **bigquery,mssql:** Fix panic on tools with array param ([#722](https://github.com/googleapis/genai-toolbox/issues/722)) ([7a6644c](https://github.com/googleapis/genai-toolbox/commit/7a6644cf0c5413e5c803955c88a2cfd0a2233ed3))
|
||||
* **postgres,mssql,cloudsqlmssql:** Encode source connection url for sources ([#727](https://github.com/googleapis/genai-toolbox/issues/727)) ([67964d9](https://github.com/googleapis/genai-toolbox/commit/67964d939f27320b63b5759f4b3f3fdaa0c76fbf)), closes [#717](https://github.com/googleapis/genai-toolbox/issues/717)
|
||||
* Set default value to field's type during unmarshalling ([#774](https://github.com/googleapis/genai-toolbox/issues/774)) ([fafed24](https://github.com/googleapis/genai-toolbox/commit/fafed2485839cf1acc1350e8a24103d2e6356ee0)), closes [#771](https://github.com/googleapis/genai-toolbox/issues/771)
|
||||
* **server/mcp:** Do not listen from port for stdio ([#719](https://github.com/googleapis/genai-toolbox/issues/719)) ([d51dbc7](https://github.com/googleapis/genai-toolbox/commit/d51dbc759ba493021d3ec6f5417fc04c21f7044f)), closes [#711](https://github.com/googleapis/genai-toolbox/issues/711)
|
||||
* **tools/mysqlexecutesql:** Handle nil panic and connection leak in Invoke ([#757](https://github.com/googleapis/genai-toolbox/issues/757)) ([7badba4](https://github.com/googleapis/genai-toolbox/commit/7badba42eefb34252be77b852a57d6bd78dd267d))
|
||||
* **tools/mysqlsql:** Handle nil panic and connection leak in invoke ([#758](https://github.com/googleapis/genai-toolbox/issues/758)) ([cbb4a33](https://github.com/googleapis/genai-toolbox/commit/cbb4a333517313744800d148840312e56340f3fd))
|
||||
|
||||
## [0.7.0](https://github.com/googleapis/genai-toolbox/compare/v0.6.0...v0.7.0) (2025-06-10)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add templateParameters field for mssqlsql ([#671](https://github.com/googleapis/genai-toolbox/issues/671)) ([b81fc6a](https://github.com/googleapis/genai-toolbox/commit/b81fc6aa6ccdfbc15676fee4d87041d9ad9682fa))
|
||||
* Add templateParameters field for mysqlsql ([#663](https://github.com/googleapis/genai-toolbox/issues/663)) ([0a08d2c](https://github.com/googleapis/genai-toolbox/commit/0a08d2c15dcbec18bb556f4dc49792ba0c69db46))
|
||||
* **metrics:** Add user agent for prebuilt tools ([#669](https://github.com/googleapis/genai-toolbox/issues/669)) ([29aa0a7](https://github.com/googleapis/genai-toolbox/commit/29aa0a70da3c2eb409a38993b3782da8bec7cb85))
|
||||
* **tools/postgressql:** Add templateParameters field ([#615](https://github.com/googleapis/genai-toolbox/issues/615)) ([b763469](https://github.com/googleapis/genai-toolbox/commit/b76346993f298b4f7493a51405d0a287bacce05f))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Improve versionString ([#658](https://github.com/googleapis/genai-toolbox/issues/658)) ([cf96f4c](https://github.com/googleapis/genai-toolbox/commit/cf96f4c249f0692e3eb19fc56c794ca6a3079307))
|
||||
* **server/stdio:** Notifications should not return a response ([#638](https://github.com/googleapis/genai-toolbox/issues/638)) ([69d047a](https://github.com/googleapis/genai-toolbox/commit/69d047af46f1ec00f236db8a978a7a7627217fd2))
|
||||
* **tools/mysqlsql:** Handled the null value for string case in mysqlsql tools ([#641](https://github.com/googleapis/genai-toolbox/issues/641)) ([ef94648](https://github.com/googleapis/genai-toolbox/commit/ef94648455c3b20adda4f8cff47e70ddccac8c06))
|
||||
* Update path library ([#678](https://github.com/googleapis/genai-toolbox/issues/678)) ([4998f82](https://github.com/googleapis/genai-toolbox/commit/4998f8285287b5daddd0043540f2cf871e256db5)), closes [#662](https://github.com/googleapis/genai-toolbox/issues/662)
|
||||
|
||||
## [0.6.0](https://github.com/googleapis/genai-toolbox/compare/v0.5.0...v0.6.0) (2025-05-28)
|
||||
|
||||
|
||||
|
||||
@@ -14,21 +14,21 @@ race, religion, or sexual identity and orientation.
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
@@ -75,7 +75,7 @@ receive and address reported violations of the code of conduct. They will then
|
||||
work with a committee consisting of representatives from the Open Source
|
||||
Programs Office and the Google Open Source Strategy team. If for any reason you
|
||||
are uncomfortable reaching out to the Project Steward, please email
|
||||
opensource@google.com.
|
||||
<opensource@google.com>.
|
||||
|
||||
We will investigate every complaint, but you may not receive a direct response.
|
||||
We will use our discretion in determining when and how to follow up on reported
|
||||
@@ -90,4 +90,4 @@ harassment or threats to anyone's safety, we may take action without notice.
|
||||
|
||||
This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
|
||||
available at
|
||||
https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
<https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
|
||||
|
||||
151
CONTRIBUTING.md
151
CONTRIBUTING.md
@@ -30,4 +30,153 @@ This project follows
|
||||
All submissions, including submissions by project members, require review. We
|
||||
use GitHub pull requests for this purpose. Consult
|
||||
[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
|
||||
information on using pull requests.
|
||||
information on using pull requests.
|
||||
|
||||
Within 2-5 days, a reviewer will review your PR. They may approve it, or request
|
||||
changes. When requesting changes, reviewers should self-assign the PR to ensure
|
||||
they are aware of any updates.
|
||||
If additional changes are needed, push additional commits to your PR branch -
|
||||
this helps the reviewer know which parts of the PR have changed. Commits will be
|
||||
squashed when merged.
|
||||
Please follow up with changes promptly. If a PR is awaiting changes by the
|
||||
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
|
||||
are inactive for more than 30 days may be closed.
|
||||
|
||||
### Adding a New Database Source and Tool
|
||||
|
||||
We recommend creating an
|
||||
[issue](https://github.com/googleapis/genai-toolbox/issues) before
|
||||
implementation to ensure we can accept the contribution and no duplicated work.
|
||||
If you have any questions, reach out on our
|
||||
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team. New
|
||||
contributions should be added with both unit tests and integration tests.
|
||||
|
||||
#### 1. Implement the New Data Source
|
||||
|
||||
We recommend looking at an [example source
|
||||
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).
|
||||
|
||||
* **Create a new directory** under `internal/sources` for your database type
|
||||
(e.g., `internal/sources/newdb`).
|
||||
* **Define a configuration struct** for your data source in a file named
|
||||
`newdb.go`. Create a `Config` struct to include all the necessary parameters
|
||||
for connecting to the database (e.g., host, port, username, password, database
|
||||
name) and a `Source` struct to store necessary parameters for tools (e.g.,
|
||||
Name, Kind, connection object, additional config).
|
||||
* **Implement the
|
||||
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
|
||||
interface**. This interface requires two methods:
|
||||
* `SourceConfigKind() string`: Returns a unique string identifier for your
|
||||
data source (e.g., `"newdb"`).
|
||||
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
|
||||
Creates a new instance of your data source and establishes a connection to
|
||||
the database.
|
||||
* **Implement the
|
||||
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
|
||||
interface**. This interface requires one method:
|
||||
* `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
|
||||
* **Implement `init()`** to register the new Source.
|
||||
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
||||
|
||||
#### 2. Implement the New Tool
|
||||
|
||||
We recommend looking at an [example tool
|
||||
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgressql).
|
||||
|
||||
* **Create a new directory** under `internal/tools` for your tool type (e.g.,
|
||||
`internal/tools/newdb` or `internal/tools/newdb<tool_name>`).
|
||||
* **Define a configuration struct** for your tool in a file named `newdbtool.go`.
|
||||
Create a `Config` struct and a `Tool` struct to store necessary parameters for
|
||||
tools.
|
||||
* **Implement the
|
||||
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
|
||||
interface**. This interface requires one method:
|
||||
* `ToolConfigKind() string`: Returns a unique string identifier for your tool
|
||||
(e.g., `"newdb"`).
|
||||
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
|
||||
instance of your tool and validates that it can connect to the specified
|
||||
data source.
|
||||
* **Implement the `Tool` interface**. This interface requires the following
|
||||
methods:
|
||||
* `Invoke(ctx context.Context, params map[string]any) ([]any, error)`:
|
||||
Executes the operation on the database using the provided parameters.
|
||||
* `ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
(ParamValues, error)`: Parses and validates the input parameters.
|
||||
* `Manifest() Manifest`: Returns a manifest describing the tool's capabilities
|
||||
and parameters.
|
||||
* `McpManifest() McpManifest`: Returns an MCP manifest describing the tool for
|
||||
use with the Model Context Protocol.
|
||||
* `Authorized(services []string) bool`: Checks if the tool is authorized to
|
||||
run based on the provided authentication services.
|
||||
* **Implement `init()`** to register the new Tool.
|
||||
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
||||
|
||||
#### 3. Add Integration Tests
|
||||
|
||||
* **Add a test file** under a new directory `tests/newdb`.
|
||||
* **Add pre-defined integration test suites** in the
|
||||
`/tests/newdb/newdb_test.go` that are **required** to be run as long as your
|
||||
code contains related features:
|
||||
|
||||
1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
|
||||
tool's manifest.
|
||||
|
||||
2. [RunToolInvokeTest][tool-call]: tests for tool calling through the native
|
||||
Toolbox endpoints.
|
||||
|
||||
3. [RunMCPToolCallMethod][mcp-call]: tests tool calling through the MCP
|
||||
endpoints.
|
||||
|
||||
4. (Optional) [RunExecuteSqlToolInvokeTest][execute-sql]: tests an
|
||||
`execute-sql` tool for any source. Only run this test if you are adding an
|
||||
`execute-sql` tool.
|
||||
|
||||
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
|
||||
parameters][temp-param-doc]. Only run this test if template
|
||||
parameters apply to your tool.
|
||||
|
||||
* **Add the new database to the test config** in
|
||||
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
|
||||
|
||||
[tool-get]:
|
||||
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L31
|
||||
[tool-call]:
|
||||
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L79>
|
||||
[mcp-call]:
|
||||
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L554
|
||||
[execute-sql]:
|
||||
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L431>
|
||||
[temp-param]:
|
||||
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L297>
|
||||
[temp-param-doc]:
|
||||
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
|
||||
|
||||
#### 4. Add Documentation
|
||||
|
||||
* **Update the documentation** to include information about your new data source
|
||||
and tool. This includes:
|
||||
* Adding a new page to the `docs/en/resources/sources` directory for your data
|
||||
source.
|
||||
* Adding a new page to the `docs/en/resources/tools` directory for your tool.
|
||||
|
||||
* **(Optional) Add samples** to the `docs/en/samples/<newdb>` directory.
|
||||
|
||||
#### (Optional) 5. Add Prebuilt Tools
|
||||
|
||||
You can provide developers with a set of "build-time" tools to aid common
|
||||
software development user journeys like viewing and creating tables/collections
|
||||
and data.
|
||||
|
||||
* **Create a set of prebuilt tools** by defining a new `tools.yaml` and adding
|
||||
it to `internal/tools`. Make sure the file name matches the source (i.e. for
|
||||
source "alloydb-postgres" create a file named "alloydb-postgres.yaml").
|
||||
* **Update `cmd/root.go`** to add new source to the `prebuilt` flag.
|
||||
* **Add tests** in
|
||||
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
|
||||
and [cmd/root_test.go](cmd/root_test.go).
|
||||
|
||||
#### 6. Submit a Pull Request
|
||||
|
||||
* **Submit a pull request** to the repository with your changes. Be sure to
|
||||
include a detailed description of your changes and any requests for long term
|
||||
testing resources.
|
||||
|
||||
405
DEVELOPER.md
405
DEVELOPER.md
@@ -1,12 +1,16 @@
|
||||
# DEVELOPER.md
|
||||
|
||||
## Before you begin
|
||||
This document provides instructions for setting up your development environment
|
||||
and contributing to the Toolbox project.
|
||||
|
||||
1. Make sure you've setup your databases.
|
||||
## Prerequisites
|
||||
|
||||
1. Install the latest version of [Go](https://go.dev/doc/install).
|
||||
Before you begin, ensure you have the following:
|
||||
|
||||
1. Locate and download dependencies:
|
||||
1. **Databases:** Set up the necessary databases for your development
|
||||
environment.
|
||||
1. **Go:** Install the latest version of [Go](https://go.dev/doc/install).
|
||||
1. **Dependencies:** Download and manage project dependencies:
|
||||
|
||||
```bash
|
||||
go get
|
||||
@@ -15,194 +19,343 @@
|
||||
|
||||
## Developing Toolbox
|
||||
|
||||
### Run Toolbox from local source
|
||||
### Running from Local Source
|
||||
|
||||
1. Create a `tools.yaml` file with your [sources and tools configurations](./README.md#Configuration).
|
||||
|
||||
1. You can specify flags for the Toolbox server. Execute the following to list the possible CLI flags:
|
||||
1. **Configuration:** Create a `tools.yaml` file to configure your sources and
|
||||
tools. See the [Configuration section in the
|
||||
README](./README.md#Configuration) for details.
|
||||
1. **CLI Flags:** List available command-line flags for the Toolbox server:
|
||||
|
||||
```bash
|
||||
go run . --help
|
||||
```
|
||||
|
||||
1. To run the server, execute the following (with any flags, if applicable):
|
||||
1. **Running the Server:** Start the Toolbox server with optional flags. The
|
||||
server listens on port 5000 by default.
|
||||
|
||||
```bash
|
||||
go run .
|
||||
```
|
||||
|
||||
The server will listen on port 5000 (by default).
|
||||
|
||||
1. Test endpoint using the following:
|
||||
1. **Testing the Endpoint:** Verify the server is running by sending a request
|
||||
to the endpoint:
|
||||
|
||||
```bash
|
||||
curl http://127.0.0.1:5000
|
||||
```
|
||||
|
||||
### Testing
|
||||
## Testing
|
||||
|
||||
- Run the lint check:
|
||||
### Infrastructure
|
||||
|
||||
```bash
|
||||
golangci-lint run --fix
|
||||
Toolbox uses both GitHub Actions and Cloud Build to run test workflows. Cloud
|
||||
Build is used when Google credentials are required. Cloud Build uses test
|
||||
project "toolbox-testing-438616".
|
||||
|
||||
### Linting
|
||||
|
||||
Run the lint check to ensure code quality:
|
||||
|
||||
```bash
|
||||
golangci-lint run --fix
|
||||
```
|
||||
|
||||
### Unit Tests
|
||||
|
||||
Execute unit tests locally:
|
||||
|
||||
```bash
|
||||
go test -race -v ./...
|
||||
```
|
||||
|
||||
### Integration Tests
|
||||
|
||||
#### Running Locally
|
||||
|
||||
1. **Environment Variables:** Set the required environment variables. Refer to
|
||||
the [Cloud Build testing configuration](./.ci/integration.cloudbuild.yaml)
|
||||
for a complete list of variables for each source.
|
||||
* `SERVICE_ACCOUNT_EMAIL`: Use your own GCP email.
|
||||
* `CLIENT_ID`: Use the Google Cloud SDK application Client ID. Contact
|
||||
Toolbox maintainers if you don't have it.
|
||||
1. **Running Tests:** Run the integration test for your target source. Specify
|
||||
the required Go build tags at the top of each integration test file.
|
||||
|
||||
```shell
|
||||
go test -race -v ./tests/<YOUR_TEST_DIR>
|
||||
```
|
||||
|
||||
- Run unit tests locally:
|
||||
For example, to run the AlloyDB integration test:
|
||||
|
||||
```bash
|
||||
go test -race -v ./...
|
||||
```shell
|
||||
go test -race -v ./tests/alloydbpg
|
||||
```
|
||||
|
||||
- Run integration tests locally:
|
||||
1. Set required environment variables. For a complete lists of required
|
||||
vairables for each source, check out the [Cloud Build testing
|
||||
configuration](./.ci/integration.cloudbuild.yaml).
|
||||
- Use your own GCP email as the `SERVICE_ACCOUNT_EMAIL`.
|
||||
- Use the Google Cloud SDK application Client ID as the `CLIENT_ID`. Ask the
|
||||
Toolbox maintainers if you don't know it already.
|
||||
#### Running on Pull Requests
|
||||
|
||||
2. Run the integration test for your target source with the required Go
|
||||
build tags specified at the top of each integration test file:
|
||||
* **Internal Contributors:** Testing workflows should trigger automatically.
|
||||
* **External Contributors:** Request Toolbox maintainers to trigger the testing
|
||||
workflows on your PR.
|
||||
|
||||
```shell
|
||||
go test -race -v ./tests/<YOUR_TEST_DIR>
|
||||
```
|
||||
#### Test Resources
|
||||
|
||||
For example, to run the AlloyDB integration test, run:
|
||||
The following databases have been added as test resources. To add a new database
|
||||
to test against, please contact the Toolbox maintainer team via an issue or PR.
|
||||
Refer to the [Cloud Build testing
|
||||
configuration](./.ci/integration.cloudbuild.yaml) for a complete list of
|
||||
variables for each source.
|
||||
|
||||
```shell
|
||||
go test -race -v ./tests/alloydbpg
|
||||
```
|
||||
* AlloyDB - setup in the test project
|
||||
* AI Natural Language ([setup
|
||||
instructions](https://cloud.google.com/alloydb/docs/ai/use-natural-language-generate-sql-queries))
|
||||
has been configured for `alloydb-a`-nl` tool tests
|
||||
* The Cloud Build service account is a user
|
||||
* Bigtable - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* BigQuery - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* Cloud SQL Postgres - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* Cloud SQL MySQL - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* Cloud SQL SQL Server - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* Couchbase - setup in the test project via the Marketplace
|
||||
* DGraph - using the public dgraph interface <https://play.dgraph.io> for
|
||||
testing
|
||||
* Memorystore Redis - setup in the test project using a Memorystore for Redis
|
||||
standalone instance
|
||||
* Memorystore Redis Cluster, Memorystore Valkey standalone, and Memorystore
|
||||
Valkey Cluster instances all require PSC connections, which requires extra
|
||||
security setup to connect from Cloud Build. Memorystore Redis standalone is
|
||||
the only one allowing PSA connection.
|
||||
* The Cloud Build service account is a user
|
||||
* Memorystore Valkey - setup in the test project using a Memorystore for Redis
|
||||
standalone instance
|
||||
* The Cloud Build service account is a user
|
||||
* MySQL - setup in the test project using a Cloud SQL instance
|
||||
* Neo4j - setup in the test project on a GCE VM
|
||||
* Postgres - setup in the test project using an AlloyDB instance
|
||||
* Spanner - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
* SQL Server - setup in the test project using a Cloud SQL instance
|
||||
* SQLite - setup in the integration test, where we create a temporary database
|
||||
file
|
||||
|
||||
- Run integration tests on your PR:
|
||||
### Other GitHub Checks
|
||||
|
||||
For internal contributors, the testing workflows should trigger
|
||||
automatically. For external contributors, ask the Toolbox
|
||||
maintainers to trigger the testing workflows on your PR.
|
||||
|
||||
## Compile the app locally
|
||||
|
||||
### Compile Toolbox binary
|
||||
|
||||
1. Run build to compile binary:
|
||||
|
||||
```bash
|
||||
go build -o toolbox
|
||||
```
|
||||
|
||||
1. You can specify flags for the Toolbox server. Execute the following to list the possible CLI flags:
|
||||
|
||||
```bash
|
||||
./toolbox --help
|
||||
```
|
||||
|
||||
1. To run the binary, execute the following (with any flags, if applicable):
|
||||
|
||||
```bash
|
||||
./toolbox
|
||||
```
|
||||
|
||||
The server will listen on port 5000 (by default).
|
||||
|
||||
1. Test endpoint using the following:
|
||||
|
||||
```bash
|
||||
curl http://127.0.0.1:5000
|
||||
```
|
||||
|
||||
### Compile Toolbox container images
|
||||
|
||||
1. Run build to compile container image:
|
||||
|
||||
```bash
|
||||
docker build -t toolbox:dev .
|
||||
```
|
||||
|
||||
1. Execute the following to view image:
|
||||
|
||||
```bash
|
||||
docker images
|
||||
```
|
||||
|
||||
1. Run container image with Docker:
|
||||
|
||||
```bash
|
||||
docker run -d toolbox:dev
|
||||
```
|
||||
* License header check (`.github/header-checker-lint.yml`) - Ensures files have
|
||||
the appropriate license
|
||||
* CLA/google - Ensures the developer has signed the CLA:
|
||||
<https://cla.developers.google.com/>
|
||||
* conventionalcommits.org - Ensures the commit messages are in the correct
|
||||
format. This repository uses tool [Release
|
||||
Please](https://github.com/googleapis/release-please) to create GitHub
|
||||
releases. It does so by parsing your git history, looking for [Conventional
|
||||
Commit messages](https://www.conventionalcommits.org/), and creating release
|
||||
PRs. Learn more by reading [How should I write my
|
||||
commits?](https://github.com/googleapis/release-please?tab=readme-ov-file#how-should-i-write-my-commits)
|
||||
|
||||
## Developing Documentation
|
||||
|
||||
1. [Install Hugo](https://gohugo.io/installation/macos/)
|
||||
1. Move into the `.hugo` directory
|
||||
### Running a Local Hugo Server
|
||||
|
||||
Follow these steps to preview documentation changes locally using a Hugo server:
|
||||
|
||||
1. **Install Hugo:** Ensure you have
|
||||
[Hugo](https://gohugo.io/installation/macos/) extended edition version
|
||||
0.146.0 or later installed.
|
||||
1. **Navigate to the Hugo Directory:**
|
||||
|
||||
```bash
|
||||
cd .hugo
|
||||
```
|
||||
|
||||
1. Install dependencies
|
||||
1. **Install Dependencies:**
|
||||
|
||||
```bash
|
||||
npm ci
|
||||
```
|
||||
|
||||
1. Run the server
|
||||
1. **Start the Server:**
|
||||
|
||||
```bash
|
||||
hugo server
|
||||
```
|
||||
|
||||
### Previewing Documentation on Pull Requests
|
||||
|
||||
#### Contributors
|
||||
|
||||
Request a repo owner to run the preview deployment workflow on your PR. A
|
||||
preview link will be automatically added as a comment to your PR.
|
||||
|
||||
#### Maintainers
|
||||
|
||||
1. **Inspect Changes:** Review the proposed changes in the PR to ensure they are
|
||||
safe and do not contain malicious code. Pay close attention to changes in the
|
||||
`.github/workflows/` directory.
|
||||
1. **Deploy Preview:** Apply the `docs: deploy-preview` label to the PR to
|
||||
deploy a documentation preview.
|
||||
|
||||
## Building Toolbox
|
||||
|
||||
### Building the Binary
|
||||
|
||||
1. **Build Command:** Compile the Toolbox binary:
|
||||
|
||||
```bash
|
||||
go build -o toolbox
|
||||
```
|
||||
|
||||
1. **Running the Binary:** Execute the compiled binary with optional flags. The
|
||||
server listens on port 5000 by default:
|
||||
|
||||
```bash
|
||||
./toolbox
|
||||
```
|
||||
|
||||
1. **Testing the Endpoint:** Verify the server is running by sending a request
|
||||
to the endpoint:
|
||||
|
||||
```bash
|
||||
curl http://127.0.0.1:5000
|
||||
```
|
||||
|
||||
### Building Container Images
|
||||
|
||||
1. **Build Command:** Build the Toolbox container image:
|
||||
|
||||
```bash
|
||||
docker build -t toolbox:dev .
|
||||
```
|
||||
|
||||
1. **View Image:** List available Docker images to confirm the build:
|
||||
|
||||
```bash
|
||||
docker images
|
||||
```
|
||||
|
||||
1. **Run Container:** Run the Toolbox container image using Docker:
|
||||
|
||||
```bash
|
||||
docker run -d toolbox:dev
|
||||
```
|
||||
|
||||
## Developing Toolbox SDKs
|
||||
|
||||
Please refer to the [SDK developer guide](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/DEVELOPER.md)
|
||||
Refer to the [SDK developer
|
||||
guide](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/DEVELOPER.md)
|
||||
for instructions on developing Toolbox SDKs.
|
||||
|
||||
## (Optional) Maintainer Information
|
||||
## Maintainer Information
|
||||
|
||||
### Team
|
||||
|
||||
Team, `@googleapis/senseai-eco`, has been set as
|
||||
[CODEOWNERS](.github/CODEOWNERS). The GitHub TeamSync tool is used to create
|
||||
this team from MDB Group, `senseai-eco`.
|
||||
|
||||
### Releasing
|
||||
|
||||
There are two types of release for Toolbox, including a versioned release and continuous release.
|
||||
Toolbox has two types of releases: versioned and continuous. It uses Google
|
||||
Cloud project, `database-toolbox`.
|
||||
|
||||
- Versioned release: Official supported distributions with the `latest` tag. The release process for versioned release is in [versioned.release.cloudbuild.yaml](https://github.com/googleapis/genai-toolbox/blob/main/versioned.release.cloudbuild.yaml).
|
||||
- Continuous release: Used for early testing features between official supported releases and end-to-end testings.
|
||||
* **Versioned Release:** Official, supported distributions tagged as `latest`.
|
||||
The release process is defined in
|
||||
[versioned.release.cloudbuild.yaml](.ci/versioned.release.cloudbuild.yaml).
|
||||
* **Continuous Release:** Used for early testing of features between official
|
||||
releases and for end-to-end testing. The release process is defined in
|
||||
[continuous.release.cloudbuild.yaml](.ci/continuous.release.cloudbuild.yaml).
|
||||
* **GitHub Release:** `.github/release-please.yml` automatically creates GitHub
|
||||
Releases and release PRs.
|
||||
|
||||
#### Supported OS and Architecture binaries
|
||||
### How-to Release a new Version
|
||||
|
||||
The following OS and computer architecture is supported within the binary releases.
|
||||
1. [Optional] If you want to override the version number, send a
|
||||
[PR](https://github.com/googleapis/genai-toolbox/pull/31) to trigger
|
||||
[release-please](https://github.com/googleapis/release-please?tab=readme-ov-file#how-do-i-change-the-version-number).
|
||||
You can generate a commit with the following line: `git commit -m "chore:
|
||||
release 0.1.0" -m "Release-As: 0.1.0" --allow-empty`
|
||||
1. [Optional] If you want to edit the changelog, send commits to the release PR
|
||||
1. Approve and merge the PR with the title “[chore(main): release
|
||||
x.x.x](https://github.com/googleapis/genai-toolbox/pull/16)”
|
||||
1. The
|
||||
[trigger](https://pantheon.corp.google.com/cloud-build/triggers;region=us-central1/edit/27bd0d21-264a-4446-b2d7-0df4e9915fb3?e=13802955&inv=1&invt=AbhU8A&mods=logs_tg_staging&project=database-toolbox)
|
||||
should automatically run when a new tag is pushed. You can view [triggered
|
||||
builds here to check the
|
||||
status](https://pantheon.corp.google.com/cloud-build/builds;region=us-central1?query=trigger_id%3D%2227bd0d21-264a-4446-b2d7-0df4e9915fb3%22&e=13802955&inv=1&invt=AbhU8A&mods=logs_tg_staging&project=database-toolbox)
|
||||
1. Update the Github release notes to include the following table:
|
||||
1. Run the following command (from the root directory):
|
||||
|
||||
- linux/amd64
|
||||
- darwin/arm64
|
||||
- darwin/amd64
|
||||
- windows/amd64
|
||||
```
|
||||
export VERSION="v0.0.0"
|
||||
.ci/generate_release_table.sh
|
||||
```
|
||||
|
||||
#### Supported container images
|
||||
1. Copy the table output
|
||||
1. In the GitHub UI, navigate to Releases and click the `edit` button.
|
||||
1. Paste the table at the bottom of release note and click `Update release`.
|
||||
1. Post release in internal chat and on Discord.
|
||||
|
||||
The following base container images is supported within the container image releases.
|
||||
#### Supported Binaries
|
||||
|
||||
- distroless
|
||||
The following operating systems and architectures are supported for binary
|
||||
releases:
|
||||
|
||||
### Automated tests
|
||||
* linux/amd64
|
||||
* darwin/arm64
|
||||
* darwin/amd64
|
||||
* windows/amd64
|
||||
|
||||
Integration and unit tests are automatically triggered via CloudBuild during each PR creation.
|
||||
#### Supported Container Images
|
||||
|
||||
The following base container images are supported for container image releases:
|
||||
|
||||
* distroless
|
||||
|
||||
### Automated Tests
|
||||
|
||||
Integration and unit tests are automatically triggered via Cloud Build on each
|
||||
pull request. Integration tests run on merge and nightly.
|
||||
|
||||
#### Failure notifications
|
||||
|
||||
On-merge and nightly tests that fail have notification setup via Cloud Build
|
||||
Failure Reporter [GitHub Actions
|
||||
Workflow](.github/workflows/schedule_reporter.yml).
|
||||
|
||||
#### Trigger Setup
|
||||
|
||||
Create a Cloud Build trigger via the UI or `gcloud` with the following specs:
|
||||
Configure a Cloud Build trigger using the UI or `gcloud` with the following
|
||||
settings:
|
||||
|
||||
- Event: Pull request
|
||||
- Region:
|
||||
- global - for default worker pools
|
||||
- Source:
|
||||
- Generation: 1st gen
|
||||
- Repo: googleapis/genai-toolbox (GitHub App)
|
||||
- Base branch: `^main$`
|
||||
- Comment control: Required except for owners and collaborators
|
||||
- Filters: add directory filter
|
||||
- Config: Cloud Build configuration file
|
||||
- Location: Repository (add path to file)
|
||||
- Service account: set for demo service to enable ID token creation to use to authenticated services
|
||||
* **Event:** Pull request
|
||||
* **Region:** global (for default worker pools)
|
||||
* **Source:**
|
||||
* Generation: 1st gen
|
||||
* Repo: googleapis/genai-toolbox (GitHub App)
|
||||
* Base branch: `^main$`
|
||||
* **Comment control:** Required except for owners and collaborators
|
||||
* **Filters:** Add directory filter
|
||||
* **Config:** Cloud Build configuration file
|
||||
* Location: Repository (add path to file)
|
||||
* **Service account:** Set for demo service to enable ID token creation for
|
||||
authenticated services
|
||||
|
||||
### Trigger
|
||||
### Triggering Tests
|
||||
|
||||
Trigger the PR tests on PRs from external contributors:
|
||||
Trigger pull request tests for external contributors by:
|
||||
|
||||
- Cloud Build tests: comment `/gcbrun`
|
||||
- Unit tests: add `tests:run` label
|
||||
* **Cloud Build tests:** Comment `/gcbrun`
|
||||
* **Unit tests:** Add the `tests:run` label
|
||||
|
||||
## Repo Setup & Automation
|
||||
|
||||
* .github/blunderbuss.yml - Auto-assign issues and PRs from GitHub teams
|
||||
* .github/renovate.json5 - Tooling for dependency updates. Dependabot is built
|
||||
into the GitHub repo for GitHub security warnings
|
||||
* go/github-issue-mirror - GitHub issues are automatically mirrored into buganizer
|
||||
* (Suspended) .github/sync-repo-settings.yaml - configure repo settings
|
||||
* .github/release-please.yml - Creates GitHub releases
|
||||
* .github/ISSUE_TEMPLATE - templates for GitHub issues
|
||||
|
||||
@@ -13,18 +13,19 @@
|
||||
# limitations under the License.
|
||||
|
||||
# Use the latest stable golang 1.x to compile to a binary
|
||||
FROM --platform=$BUILDPLATFORM golang:1 as build
|
||||
FROM --platform=$BUILDPLATFORM golang:1 AS build
|
||||
|
||||
WORKDIR /go/src/genai-toolbox
|
||||
COPY . .
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG METADATA_TAGS=dev
|
||||
ARG BUILD_TYPE="container.dev"
|
||||
ARG COMMIT_SHA=""
|
||||
|
||||
RUN go get ./...
|
||||
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.metadataString=container.${METADATA_TAGS}"
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=container.${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}"
|
||||
|
||||
# Final Stage
|
||||
FROM gcr.io/distroless/static:nonroot
|
||||
|
||||
235
README.md
235
README.md
@@ -3,10 +3,10 @@
|
||||
# MCP Toolbox for Databases
|
||||
|
||||
[](https://discord.gg/Dmm69peqjh)
|
||||
[](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
|
||||
[](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
|
||||
|
||||
> [!NOTE]
|
||||
> MCP Toolbox for Databases is currently in beta, and may see breaking
|
||||
> [!NOTE]
|
||||
> MCP Toolbox for Databases is currently in beta, and may see breaking
|
||||
> changes until the first stable release (v1.0).
|
||||
|
||||
MCP Toolbox for Databases is an open source MCP server for databases. It enables
|
||||
@@ -16,11 +16,10 @@ such as connection pooling, authentication, and more.
|
||||
This README provides a brief overview. For comprehensive details, see the [full
|
||||
documentation](https://googleapis.github.io/genai-toolbox/).
|
||||
|
||||
|
||||
> [!NOTE]
|
||||
> [!NOTE]
|
||||
> This solution was originally named “Gen AI Toolbox for Databases” as
|
||||
> its initial development predated MCP, but was renamed to align with recently
|
||||
> added MCP compatibility.
|
||||
> added MCP compatibility.
|
||||
|
||||
<!-- TOC ignore:true -->
|
||||
## Table of Contents
|
||||
@@ -30,23 +29,23 @@ documentation](https://googleapis.github.io/genai-toolbox/).
|
||||
- [Why Toolbox?](#why-toolbox)
|
||||
- [General Architecture](#general-architecture)
|
||||
- [Getting Started](#getting-started)
|
||||
- [Installing the server](#installing-the-server)
|
||||
- [Running the server](#running-the-server)
|
||||
- [Integrating your application](#integrating-your-application)
|
||||
- [Installing the server](#installing-the-server)
|
||||
- [Running the server](#running-the-server)
|
||||
- [Integrating your application](#integrating-your-application)
|
||||
- [Configuration](#configuration)
|
||||
- [Sources](#sources)
|
||||
- [Tools](#tools)
|
||||
- [Toolsets](#toolsets)
|
||||
- [Sources](#sources)
|
||||
- [Tools](#tools)
|
||||
- [Toolsets](#toolsets)
|
||||
- [Versioning](#versioning)
|
||||
- [Contributing](#contributing)
|
||||
|
||||
<!-- /TOC -->
|
||||
|
||||
|
||||
## Why Toolbox?
|
||||
## Why Toolbox?
|
||||
|
||||
Toolbox helps you build Gen AI tools that let your agents access data in your
|
||||
database. Toolbox provides:
|
||||
|
||||
- **Simplified development**: Integrate tools to your agent in less than 10
|
||||
lines of code, reuse tools between multiple agents or frameworks, and deploy
|
||||
new versions of tools more easily.
|
||||
@@ -56,6 +55,33 @@ database. Toolbox provides:
|
||||
- **End-to-end observability**: Out of the box metrics and tracing with built-in
|
||||
support for OpenTelemetry.
|
||||
|
||||
**⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
|
||||
|
||||
Stop context-switching and let your AI assistant become a true co-developer. By
|
||||
[connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
|
||||
delegate complex and time-consuming database tasks, allowing you to build faster
|
||||
and focus on what matters. This isn't just about code completion; it's about
|
||||
giving your AI the context it needs to handle the entire development lifecycle.
|
||||
|
||||
Here’s how it will save you time:
|
||||
|
||||
- **Query in Plain English**: Interact with your data using natural language
|
||||
right from your IDE. Ask complex questions like, *"How many orders were
|
||||
delivered in 2024, and what items were in them?"* without writing any SQL.
|
||||
- **Automate Database Management**: Simply describe your data needs, and let the
|
||||
AI assistant manage your database for you. It can handle generating queries,
|
||||
creating tables, adding indexes, and more.
|
||||
- **Generate Context-Aware Code**: Empower your AI assistant to generate
|
||||
application code and tests with a deep understanding of your real-time
|
||||
database schema. This accelerates the development cycle by ensuring the
|
||||
generated code is directly usable.
|
||||
- **Slash Development Overhead**: Radically reduce the time spent on manual
|
||||
setup and boilerplate. MCP Toolbox helps streamline lengthy database
|
||||
configurations, repetitive code, and error-prone schema migrations.
|
||||
|
||||
Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
|
||||
|
||||
[connect-ide]: https://googleapis.github.io/genai-toolbox/how-to/connect-ide/
|
||||
|
||||
## General Architecture
|
||||
|
||||
@@ -71,6 +97,7 @@ redeploying your application.
|
||||
## Getting Started
|
||||
|
||||
### Installing the server
|
||||
|
||||
For the latest version, check the [releases page][releases] and use the
|
||||
following instructions for your OS and CPU architecture.
|
||||
|
||||
@@ -84,7 +111,7 @@ To install Toolbox as a binary:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.6.0
|
||||
export VERSION=0.8.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -97,7 +124,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.6.0
|
||||
export VERSION=0.8.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -110,7 +137,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.6.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -124,6 +151,8 @@ execute `toolbox` to start the server:
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
> [!NOTE]
|
||||
> Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
@@ -138,20 +167,28 @@ Once your server is up and running, you can load the tools into your
|
||||
application. See below the list of Client SDKs for using various frameworks:
|
||||
|
||||
<details open>
|
||||
<summary>Core</summary>
|
||||
<summary>Python (<a href="https://github.com/googleapis/mcp-toolbox-sdk-python">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
<details open>
|
||||
<summary>Core</summary>
|
||||
|
||||
1. Install [Toolbox Core SDK][toolbox-core]:
|
||||
|
||||
```bash
|
||||
pip install toolbox-core
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```python
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
# these tools can be passed to your application!
|
||||
tools = await client.load_toolset("toolset_name")
|
||||
```
|
||||
|
||||
@@ -161,57 +198,179 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[toolbox-core]: https://pypi.org/project/toolbox-core/
|
||||
[toolbox-core-readme]: https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main/packages/toolbox-core/README.md
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>LangChain / LangGraph</summary>
|
||||
</details>
|
||||
<details>
|
||||
<summary>LangChain / LangGraph</summary>
|
||||
|
||||
1. Install [Toolbox LangChain SDK][toolbox-langchain]:
|
||||
|
||||
```bash
|
||||
pip install toolbox-langchain
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```python
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
# these tools can be passed to your application!
|
||||
tools = client.load_toolset()
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox LangChain SDK, see the
|
||||
[project's README][toolbox-langchain-readme].
|
||||
For more detailed instructions on using the Toolbox LangChain SDK, see the
|
||||
[project's README][toolbox-langchain-readme].
|
||||
|
||||
[toolbox-langchain]: https://pypi.org/project/toolbox-langchain/
|
||||
[toolbox-langchain-readme]: https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md
|
||||
[toolbox-langchain]: https://pypi.org/project/toolbox-langchain/
|
||||
[toolbox-langchain-readme]: https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>LlamaIndex</summary>
|
||||
</details>
|
||||
<details>
|
||||
<summary>LlamaIndex</summary>
|
||||
|
||||
1. Install [Toolbox Llamaindex SDK][toolbox-llamaindex]:
|
||||
|
||||
```bash
|
||||
pip install toolbox-llamaindex
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```python
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
# these tools can be passed to your application!
|
||||
tools = client.load_toolset()
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox Llamaindex SDK, see the
|
||||
[project's README][toolbox-llamaindex-readme].
|
||||
For more detailed instructions on using the Toolbox Llamaindex SDK, see the
|
||||
[project's README][toolbox-llamaindex-readme].
|
||||
|
||||
[toolbox-llamaindex]: https://pypi.org/project/toolbox-llamaindex/
|
||||
[toolbox-llamaindex-readme]: https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md
|
||||
[toolbox-llamaindex]: https://pypi.org/project/toolbox-llamaindex/
|
||||
[toolbox-llamaindex-readme]: https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md
|
||||
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
<details>
|
||||
<summary>Javascript/Typescript (<a href="https://github.com/googleapis/mcp-toolbox-sdk-js">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
<details open>
|
||||
<summary>Core</summary>
|
||||
|
||||
1. Install [Toolbox Core SDK][toolbox-core-js]:
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/core
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```javascript
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const tools = await client.loadToolset('toolsetName');
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's README][toolbox-core-js-readme].
|
||||
|
||||
[toolbox-core-js]: https://www.npmjs.com/package/@toolbox-sdk/core
|
||||
[toolbox-core-js-readme]: https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>LangChain / LangGraph</summary>
|
||||
|
||||
1. Install [Toolbox Core SDK][toolbox-core-js]:
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/core
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```javascript
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(currTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
|
||||
// Use these tools in your Langchain/Langraph applications
|
||||
const tools = toolboxTools.map(getTool);
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>Genkit</summary>
|
||||
|
||||
1. Install [Toolbox Core SDK][toolbox-core-js]:
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/core
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```javascript
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
import { genkit } from 'genkit';
|
||||
|
||||
// Initialise genkit
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => ai.defineTool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
}, toolboxTool)
|
||||
|
||||
// Use these tools in your Genkit applications
|
||||
const tools = toolboxTools.map(getTool);
|
||||
```
|
||||
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
|
||||
</details>
|
||||
|
||||
## Configuration
|
||||
@@ -222,6 +381,7 @@ tools.yaml` flag.
|
||||
|
||||
You can find more detailed reference documentation to all resource types in the
|
||||
[Resources](https://googleapis.github.io/genai-toolbox/resources/).
|
||||
|
||||
### Sources
|
||||
|
||||
The `sources` section of your `tools.yaml` defines what data sources your
|
||||
@@ -263,7 +423,6 @@ tools:
|
||||
For more details on configuring different types of tools, see the
|
||||
[Tools](https://googleapis.github.io/genai-toolbox/resources/tools).
|
||||
|
||||
|
||||
### Toolsets
|
||||
|
||||
The `toolsets` section of your `tools.yaml` allows you to define groups of tools
|
||||
@@ -300,7 +459,7 @@ This project uses [semantic versioning](https://semver.org/), including a
|
||||
- PATCH version when we make backward compatible bug fixes
|
||||
|
||||
The public API that this applies to is the CLI associated with Toolbox, the
|
||||
interactions with official SDKs, and the definitions in the `tools.yaml` file.
|
||||
interactions with official SDKs, and the definitions in the `tools.yaml` file.
|
||||
|
||||
## Contributing
|
||||
|
||||
|
||||
527
cmd/root.go
527
cmd/root.go
@@ -19,28 +19,84 @@ import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"io"
|
||||
"maps"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/fsnotify/fsnotify"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||
)
|
||||
|
||||
var (
|
||||
// versionString indicates the version of this library.
|
||||
//go:embed version.txt
|
||||
// versionString stores the full semantic version, including build metadata.
|
||||
versionString string
|
||||
// versionNum indicates the numerical part fo the version
|
||||
//go:embed version.txt
|
||||
versionNum string
|
||||
// metadataString indicates additional build or distribution metadata.
|
||||
metadataString string
|
||||
buildType string = "dev" // should be one of "dev", "binary", or "container"
|
||||
// commitSha is the git commit it was built from
|
||||
commitSha string
|
||||
)
|
||||
|
||||
func init() {
|
||||
@@ -49,10 +105,11 @@ func init() {
|
||||
|
||||
// semanticVersion returns the version of the CLI including a compile-time metadata.
|
||||
func semanticVersion() string {
|
||||
v := strings.TrimSpace(versionString)
|
||||
if metadataString != "" {
|
||||
v += "+" + metadataString
|
||||
metadataStrings := []string{buildType, runtime.GOOS, runtime.GOARCH}
|
||||
if commitSha != "" {
|
||||
metadataStrings = append(metadataStrings, commitSha)
|
||||
}
|
||||
v := strings.TrimSpace(versionNum) + "+" + strings.Join(metadataStrings, ".")
|
||||
return v
|
||||
}
|
||||
|
||||
@@ -72,6 +129,8 @@ type Command struct {
|
||||
cfg server.ServerConfig
|
||||
logger log.Logger
|
||||
tools_file string
|
||||
tools_files []string
|
||||
tools_folder string
|
||||
prebuiltConfig string
|
||||
inStream io.Reader
|
||||
outStream io.Writer
|
||||
@@ -115,7 +174,9 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
|
||||
// deprecate tools_file
|
||||
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
|
||||
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
|
||||
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder.")
|
||||
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder.")
|
||||
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files.")
|
||||
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
|
||||
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
|
||||
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
||||
@@ -123,6 +184,8 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'postgres', 'spanner', 'spanner-postgres'.")
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||
|
||||
// wrap RunE command so that we have access to original Command object
|
||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||
@@ -171,6 +234,307 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
|
||||
return toolsFile, nil
|
||||
}
|
||||
|
||||
// mergeToolsFiles merges multiple ToolsFile structs into one.
|
||||
// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets.
|
||||
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
|
||||
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
||||
merged := ToolsFile{
|
||||
Sources: make(server.SourceConfigs),
|
||||
AuthServices: make(server.AuthServiceConfigs),
|
||||
Tools: make(server.ToolConfigs),
|
||||
Toolsets: make(server.ToolsetConfigs),
|
||||
}
|
||||
|
||||
var conflicts []string
|
||||
|
||||
for fileIndex, file := range files {
|
||||
// Check for conflicts and merge sources
|
||||
for name, source := range file.Sources {
|
||||
if _, exists := merged.Sources[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Sources[name] = source
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge authSources (deprecated, but still support)
|
||||
for name, authSource := range file.AuthSources {
|
||||
if _, exists := merged.AuthSources[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.AuthSources[name] = authSource
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge authServices
|
||||
for name, authService := range file.AuthServices {
|
||||
if _, exists := merged.AuthServices[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.AuthServices[name] = authService
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge tools
|
||||
for name, tool := range file.Tools {
|
||||
if _, exists := merged.Tools[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Tools[name] = tool
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge toolsets
|
||||
for name, toolset := range file.Toolsets {
|
||||
if _, exists := merged.Toolsets[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Toolsets[name] = toolset
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If conflicts were detected, return an error
|
||||
if len(conflicts) > 0 {
|
||||
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, and toolset has a unique name across all files", strings.Join(conflicts, "\n - "))
|
||||
}
|
||||
|
||||
return merged, nil
|
||||
}
|
||||
|
||||
// loadAndMergeToolsFiles loads multiple YAML files and merges them
|
||||
func loadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) {
|
||||
var toolsFiles []ToolsFile
|
||||
|
||||
for _, filePath := range filePaths {
|
||||
buf, err := os.ReadFile(filePath)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err)
|
||||
}
|
||||
|
||||
toolsFile, err := parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err)
|
||||
}
|
||||
|
||||
toolsFiles = append(toolsFiles, toolsFile)
|
||||
}
|
||||
|
||||
mergedFile, err := mergeToolsFiles(toolsFiles...)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err)
|
||||
}
|
||||
|
||||
return mergedFile, nil
|
||||
}
|
||||
|
||||
// loadAndMergeToolsFolder loads all YAML files from a directory and merges them
|
||||
func loadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) {
|
||||
// Check if directory exists
|
||||
info, err := os.Stat(folderPath)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err)
|
||||
}
|
||||
if !info.IsDir() {
|
||||
return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath)
|
||||
}
|
||||
|
||||
// Find all YAML files in the directory
|
||||
pattern := filepath.Join(folderPath, "*.yaml")
|
||||
yamlFiles, err := filepath.Glob(pattern)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
|
||||
}
|
||||
|
||||
// Also find .yml files
|
||||
ymlPattern := filepath.Join(folderPath, "*.yml")
|
||||
ymlFiles, err := filepath.Glob(ymlPattern)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
|
||||
}
|
||||
|
||||
// Combine both file lists
|
||||
allFiles := append(yamlFiles, ymlFiles...)
|
||||
|
||||
if len(allFiles) == 0 {
|
||||
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
|
||||
}
|
||||
|
||||
// Use existing loadAndMergeToolsFiles function
|
||||
return loadAndMergeToolsFiles(ctx, allFiles)
|
||||
}
|
||||
|
||||
func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Server) error {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := validateReloadEdits(ctx, toolsFile)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateReloadEdits checks that the reloaded tools file configs can initialized without failing
|
||||
func validateReloadEdits(
|
||||
ctx context.Context, toolsFile ToolsFile,
|
||||
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, error,
|
||||
) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
logger.DebugContext(ctx, "Attempting to parse and validate reloaded tools file.")
|
||||
|
||||
ctx, span := instrumentation.Tracer.Start(ctx, "toolbox/server/reload")
|
||||
defer span.End()
|
||||
|
||||
reloadedConfig := server.ServerConfig{
|
||||
Version: versionString,
|
||||
SourceConfigs: toolsFile.Sources,
|
||||
AuthServiceConfigs: toolsFile.AuthServices,
|
||||
ToolConfigs: toolsFile.Tools,
|
||||
ToolsetConfigs: toolsFile.Toolsets,
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
|
||||
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
|
||||
}
|
||||
|
||||
// watchChanges checks for changes in the provided yaml tools file(s) or folder.
|
||||
func watchChanges(ctx context.Context, watchDirs map[string]bool, watchedFiles map[string]bool, s *server.Server) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
w, err := fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error setting up new watcher %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
defer w.Close()
|
||||
|
||||
watchingFolder := false
|
||||
var folderToWatch string
|
||||
|
||||
// if watchedFiles is empty, indicates that user passed entire folder instead
|
||||
if len(watchedFiles) == 0 {
|
||||
watchingFolder = true
|
||||
|
||||
// validate that watchDirs only has single element
|
||||
if len(watchDirs) > 1 {
|
||||
logger.WarnContext(ctx, "error setting watcher, expected single tools folder if no file(s) are defined.")
|
||||
return
|
||||
}
|
||||
|
||||
for onlyKey := range watchDirs {
|
||||
folderToWatch = onlyKey
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
for dir := range watchDirs {
|
||||
err := w.Add(dir)
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, fmt.Sprintf("Error adding path %s to watcher: %s", dir, err))
|
||||
break
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("Added directory %s to watcher.", dir))
|
||||
}
|
||||
|
||||
// debounce timer is used to prevent multiple writes triggering multiple reloads
|
||||
debounceDelay := 100 * time.Millisecond
|
||||
debounce := time.NewTimer(1 * time.Minute)
|
||||
debounce.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
logger.DebugContext(ctx, "file watcher context cancelled")
|
||||
return
|
||||
case err, ok := <-w.Errors:
|
||||
if !ok {
|
||||
logger.WarnContext(ctx, "file watcher was closed unexpectedly")
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "file watcher error %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
case e, ok := <-w.Events:
|
||||
if !ok {
|
||||
logger.WarnContext(ctx, "file watcher already closed")
|
||||
return
|
||||
}
|
||||
|
||||
// only check for write events which indicate user saved a new tools file
|
||||
if !e.Has(fsnotify.Write) {
|
||||
continue
|
||||
}
|
||||
|
||||
cleanedFilename := filepath.Clean(e.Name)
|
||||
logger.DebugContext(ctx, fmt.Sprintf("WRITE event detected in %s", cleanedFilename))
|
||||
|
||||
folderChanged := watchingFolder &&
|
||||
(strings.HasSuffix(cleanedFilename, ".yaml") || strings.HasSuffix(cleanedFilename, ".yml"))
|
||||
|
||||
if folderChanged || watchedFiles[cleanedFilename] {
|
||||
// indicates the write event is on a relevant file
|
||||
debounce.Reset(debounceDelay)
|
||||
}
|
||||
|
||||
case <-debounce.C:
|
||||
debounce.Stop()
|
||||
var reloadedToolsFile ToolsFile
|
||||
|
||||
if watchingFolder {
|
||||
logger.DebugContext(ctx, "Reloading tools folder.")
|
||||
reloadedToolsFile, err = loadAndMergeToolsFolder(ctx, folderToWatch)
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error loading tools folder %s", err)
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
logger.DebugContext(ctx, "Reloading tools file(s).")
|
||||
reloadedToolsFile, err = loadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles)))
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error loading tools files %s", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
err = handleDynamicReload(ctx, reloadedToolsFile, s)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse reloaded tools file at %q: %w", reloadedToolsFile, err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// updateLogLevel checks if Toolbox have to update the existing log level set by users.
|
||||
// stdio doesn't support "debug" and "info" logs.
|
||||
func updateLogLevel(stdio bool, logLevel string) bool {
|
||||
@@ -185,6 +549,33 @@ func updateLogLevel(stdio bool, logLevel string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder string) (map[string]bool, map[string]bool) {
|
||||
var relevantFiles []string
|
||||
|
||||
// map for efficiently checking if a file is relevant
|
||||
watchedFiles := make(map[string]bool)
|
||||
|
||||
// dirs that will be added to watcher (fsnotify prefers watching directory then filtering for file)
|
||||
watchDirs := make(map[string]bool)
|
||||
|
||||
if len(toolsFiles) > 0 {
|
||||
relevantFiles = toolsFiles
|
||||
} else if toolsFolder != "" {
|
||||
watchDirs[filepath.Clean(toolsFolder)] = true
|
||||
} else {
|
||||
relevantFiles = []string{toolsFile}
|
||||
}
|
||||
|
||||
// extract parent dir for relevant files and dedup
|
||||
for _, f := range relevantFiles {
|
||||
cleanFile := filepath.Clean(f)
|
||||
watchedFiles[cleanFile] = true
|
||||
watchDirs[filepath.Dir(cleanFile)] = true
|
||||
}
|
||||
|
||||
return watchDirs, watchedFiles
|
||||
}
|
||||
|
||||
func run(cmd *Command) error {
|
||||
if updateLogLevel(cmd.cfg.Stdio, cmd.cfg.LogLevel.String()) {
|
||||
cmd.cfg.LogLevel = server.StringLevel(log.Warn)
|
||||
@@ -228,13 +619,13 @@ func run(cmd *Command) error {
|
||||
}
|
||||
cmd.logger = logger
|
||||
default:
|
||||
return fmt.Errorf("logging format invalid.")
|
||||
return fmt.Errorf("logging format invalid")
|
||||
}
|
||||
|
||||
ctx = util.WithLogger(ctx, cmd.logger)
|
||||
|
||||
// Set up OpenTelemetry
|
||||
otelShutdown, err := telemetry.SetupOTel(ctx, cmd.Command.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
|
||||
otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
@@ -248,38 +639,86 @@ func run(cmd *Command) error {
|
||||
}
|
||||
}()
|
||||
|
||||
var buf []byte
|
||||
var toolsFile ToolsFile
|
||||
|
||||
if cmd.prebuiltConfig != "" {
|
||||
// Make sure --prebuilt and --tools-file flags are mutually exclusive
|
||||
if cmd.tools_file != "" {
|
||||
errMsg := fmt.Errorf("--prebuilt and --tools-file flags cannot be used simultaneously")
|
||||
// Make sure --prebuilt and --tools-file/--tools-files/--tools-folder flags are mutually exclusive
|
||||
if cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != "" {
|
||||
errMsg := fmt.Errorf("--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously")
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
// Use prebuilt tools
|
||||
buf, err = prebuiltconfigs.Get(cmd.prebuiltConfig)
|
||||
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
|
||||
if err != nil {
|
||||
cmd.logger.ErrorContext(ctx, err.Error())
|
||||
return err
|
||||
}
|
||||
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
|
||||
cmd.logger.InfoContext(ctx, logMsg)
|
||||
// Append prebuilt.source to Version string for the User Agent
|
||||
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
|
||||
|
||||
toolsFile, err = parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
} else if len(cmd.tools_files) > 0 {
|
||||
// Make sure --tools-file, --tools-files, and --tools-folder flags are mutually exclusive
|
||||
if cmd.tools_file != "" || cmd.tools_folder != "" {
|
||||
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
// Use multiple tools files
|
||||
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
|
||||
var err error
|
||||
toolsFile, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
|
||||
if err != nil {
|
||||
cmd.logger.ErrorContext(ctx, err.Error())
|
||||
return err
|
||||
}
|
||||
} else if cmd.tools_folder != "" {
|
||||
// Make sure --tools-folder and other flags are mutually exclusive
|
||||
if cmd.tools_file != "" || len(cmd.tools_files) > 0 {
|
||||
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
// Use tools folder
|
||||
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
|
||||
var err error
|
||||
toolsFile, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
|
||||
if err != nil {
|
||||
cmd.logger.ErrorContext(ctx, err.Error())
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// Set default value of tools-file flag to tools.yaml
|
||||
if cmd.tools_file == "" {
|
||||
cmd.tools_file = "tools.yaml"
|
||||
}
|
||||
// Read tool file contents
|
||||
buf, err = os.ReadFile(cmd.tools_file)
|
||||
|
||||
// Read single tool file contents
|
||||
buf, err := os.ReadFile(cmd.tools_file)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
toolsFile, err = parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
}
|
||||
|
||||
toolsFile, err := parseToolsFile(ctx, buf)
|
||||
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets
|
||||
authSourceConfigs := toolsFile.AuthSources
|
||||
if authSourceConfigs != nil {
|
||||
@@ -292,38 +731,60 @@ func run(cmd *Command) error {
|
||||
return errMsg
|
||||
}
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
// start server
|
||||
s, err := server.NewServer(ctx, cmd.cfg, cmd.logger)
|
||||
s, err := server.NewServer(ctx, cmd.cfg)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("toolbox failed to initialize: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
err = s.Listen(ctx)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("toolbox failed to start listener: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
cmd.logger.InfoContext(ctx, "Server ready to serve!")
|
||||
|
||||
// run server in background
|
||||
srvErr := make(chan error)
|
||||
go func() {
|
||||
defer close(srvErr)
|
||||
if cmd.cfg.Stdio {
|
||||
if cmd.cfg.Stdio {
|
||||
go func() {
|
||||
defer close(srvErr)
|
||||
err = s.ServeStdio(ctx, cmd.inStream, cmd.outStream)
|
||||
if err != nil {
|
||||
srvErr <- err
|
||||
}
|
||||
} else {
|
||||
}()
|
||||
} else {
|
||||
err = s.Listen(ctx)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("toolbox failed to start listener: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
cmd.logger.InfoContext(ctx, "Server ready to serve!")
|
||||
if cmd.cfg.UI {
|
||||
cmd.logger.InfoContext(ctx, "Toolbox UI is up and running at: http://localhost:5000/ui")
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer close(srvErr)
|
||||
err = s.Serve(ctx)
|
||||
if err != nil {
|
||||
srvErr <- err
|
||||
}
|
||||
}
|
||||
}()
|
||||
}()
|
||||
}
|
||||
|
||||
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
|
||||
|
||||
if !cmd.cfg.DisableReload {
|
||||
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
||||
go watchChanges(ctx, watchDirs, watchedFiles, s)
|
||||
}
|
||||
|
||||
// wait for either the server to error out or the command's context to be canceled
|
||||
select {
|
||||
@@ -339,7 +800,7 @@ func run(cmd *Command) error {
|
||||
cmd.logger.WarnContext(shutdownContext, "Shutting down gracefully...")
|
||||
err := s.Shutdown(shutdownContext)
|
||||
if err == context.DeadlineExceeded {
|
||||
return fmt.Errorf("graceful shutdown timed out... forcing exit.")
|
||||
return fmt.Errorf("graceful shutdown timed out... forcing exit")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
276
cmd/root_test.go
276
cmd/root_test.go
@@ -16,28 +16,41 @@ package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
cloudsqlpgsrc "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/postgressql"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func withDefaults(c server.ServerConfig) server.ServerConfig {
|
||||
data, _ := os.ReadFile("version.txt")
|
||||
c.Version = strings.TrimSpace(string(data))
|
||||
version := strings.TrimSpace(string(data)) // Preserving 'data', new var for clarity
|
||||
c.Version = version + "+" + strings.Join([]string{"dev", runtime.GOOS, runtime.GOARCH}, ".")
|
||||
|
||||
if c.Address == "" {
|
||||
c.Address = "127.0.0.1"
|
||||
}
|
||||
@@ -171,6 +184,13 @@ func TestServerConfigFlags(t *testing.T) {
|
||||
Stdio: true,
|
||||
}),
|
||||
},
|
||||
{
|
||||
desc: "disable reload",
|
||||
args: []string{"--disable-reload"},
|
||||
want: withDefaults(server.ServerConfig{
|
||||
DisableReload: true,
|
||||
}),
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
@@ -226,6 +246,71 @@ func TestToolFileFlag(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolsFilesFlag(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
args []string
|
||||
want []string
|
||||
}{
|
||||
{
|
||||
desc: "no value",
|
||||
args: []string{},
|
||||
want: []string{},
|
||||
},
|
||||
{
|
||||
desc: "single file",
|
||||
args: []string{"--tools-files", "foo.yaml"},
|
||||
want: []string{"foo.yaml"},
|
||||
},
|
||||
{
|
||||
desc: "multiple files",
|
||||
args: []string{"--tools-files", "foo.yaml,bar.yaml"},
|
||||
want: []string{"foo.yaml", "bar.yaml"},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
c, _, err := invokeCommand(tc.args)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error invoking command: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(c.tools_files, tc.want); diff != "" {
|
||||
t.Fatalf("got %v, want %v", c.tools_files, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolsFolderFlag(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
args []string
|
||||
want string
|
||||
}{
|
||||
{
|
||||
desc: "no value",
|
||||
args: []string{},
|
||||
want: "",
|
||||
},
|
||||
{
|
||||
desc: "folder set",
|
||||
args: []string{"--tools-folder", "test-folder"},
|
||||
want: "test-folder",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
c, _, err := invokeCommand(tc.args)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error invoking command: %s", err)
|
||||
}
|
||||
if c.tools_folder != tc.want {
|
||||
t.Fatalf("got %v, want %v", c.tools_folder, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrebuiltFlag(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
@@ -358,7 +443,7 @@ func TestParseToolFile(t *testing.T) {
|
||||
Tools: server.ToolConfigs{
|
||||
"example_tool": postgressql.Config{
|
||||
Name: "example_tool",
|
||||
Kind: postgressql.ToolKind,
|
||||
Kind: "postgres-sql",
|
||||
Source: "my-pg-instance",
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
@@ -489,7 +574,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Tools: server.ToolConfigs{
|
||||
"example_tool": postgressql.Config{
|
||||
Name: "example_tool",
|
||||
Kind: postgressql.ToolKind,
|
||||
Kind: "postgres-sql",
|
||||
Source: "my-pg-instance",
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
@@ -588,7 +673,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Tools: server.ToolConfigs{
|
||||
"example_tool": postgressql.Config{
|
||||
Name: "example_tool",
|
||||
Kind: postgressql.ToolKind,
|
||||
Kind: "postgres-sql",
|
||||
Source: "my-pg-instance",
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
@@ -689,7 +774,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
Tools: server.ToolConfigs{
|
||||
"example_tool": postgressql.Config{
|
||||
Name: "example_tool",
|
||||
Kind: postgressql.ToolKind,
|
||||
Kind: "postgres-sql",
|
||||
Source: "my-pg-instance",
|
||||
Description: "some description",
|
||||
Statement: "SELECT * FROM SQL_STATEMENT;\n",
|
||||
@@ -842,7 +927,7 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
Tools: server.ToolConfigs{
|
||||
"example_tool": http.Config{
|
||||
Name: "example_tool",
|
||||
Kind: http.ToolKind,
|
||||
Kind: "http",
|
||||
Source: "my-instance",
|
||||
Method: "GET",
|
||||
Path: "search?name=alice&pet=cat",
|
||||
@@ -897,6 +982,183 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
// normalizeFilepaths is a helper function to allow same filepath formats for Mac and Windows.
|
||||
// this prevents needing multiple "want" cases for TestResolveWatcherInputs
|
||||
func normalizeFilepaths(m map[string]bool) map[string]bool {
|
||||
newMap := make(map[string]bool)
|
||||
for k, v := range m {
|
||||
newMap[filepath.ToSlash(k)] = v
|
||||
}
|
||||
return newMap
|
||||
}
|
||||
|
||||
func TestResolveWatcherInputs(t *testing.T) {
|
||||
tcs := []struct {
|
||||
description string
|
||||
toolsFile string
|
||||
toolsFiles []string
|
||||
toolsFolder string
|
||||
wantWatchDirs map[string]bool
|
||||
wantWatchedFiles map[string]bool
|
||||
}{
|
||||
{
|
||||
description: "single tools file",
|
||||
toolsFile: "tools_folder/example_tools.yaml",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{"tools_folder/example_tools.yaml": true},
|
||||
},
|
||||
{
|
||||
description: "default tools file (root dir)",
|
||||
toolsFile: "tools.yaml",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{".": true},
|
||||
wantWatchedFiles: map[string]bool{"tools.yaml": true},
|
||||
},
|
||||
{
|
||||
description: "multiple files in different folders",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{"tools_folder/example_tools.yaml", "tools_folder2/example_tools.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true, "tools_folder2": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder2/example_tools.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "multiple files in same folder",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{"tools_folder/example_tools.yaml", "tools_folder/example_tools2.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder/example_tools2.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "multiple files in different levels",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{
|
||||
"tools_folder/example_tools.yaml",
|
||||
"tools_folder/special_tools/example_tools2.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true, "tools_folder/special_tools": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder/special_tools/example_tools2.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "tools folder",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "tools_folder",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.description, func(t *testing.T) {
|
||||
gotWatchDirs, gotWatchedFiles := resolveWatcherInputs(tc.toolsFile, tc.toolsFiles, tc.toolsFolder)
|
||||
|
||||
normalizedGotWatchDirs := normalizeFilepaths(gotWatchDirs)
|
||||
normalizedGotWatchedFiles := normalizeFilepaths(gotWatchedFiles)
|
||||
|
||||
if diff := cmp.Diff(tc.wantWatchDirs, normalizedGotWatchDirs); diff != "" {
|
||||
t.Errorf("incorrect watchDirs: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantWatchedFiles, normalizedGotWatchedFiles); diff != "" {
|
||||
t.Errorf("incorrect watchedFiles: diff %v", diff)
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// helper function for testing file detection in dynamic reloading
|
||||
func tmpFileWithCleanup(content []byte) (string, func(), error) {
|
||||
f, err := os.CreateTemp("", "*")
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
cleanup := func() { os.Remove(f.Name()) }
|
||||
|
||||
if _, err := f.Write(content); err != nil {
|
||||
cleanup()
|
||||
return "", nil, err
|
||||
}
|
||||
if err := f.Close(); err != nil {
|
||||
cleanup()
|
||||
return "", nil, err
|
||||
}
|
||||
return f.Name(), cleanup, err
|
||||
}
|
||||
|
||||
func TestSingleEdit(t *testing.T) {
|
||||
ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
|
||||
defer cancelCtx()
|
||||
|
||||
pr, pw := io.Pipe()
|
||||
defer pw.Close()
|
||||
defer pr.Close()
|
||||
|
||||
fileToWatch, cleanup, err := tmpFileWithCleanup([]byte("initial content"))
|
||||
if err != nil {
|
||||
t.Fatalf("error editing tools file %s", err)
|
||||
}
|
||||
defer cleanup()
|
||||
|
||||
logger, err := log.NewStdLogger(pw, pw, "DEBUG")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to setup logger %s", err)
|
||||
}
|
||||
ctx = util.WithLogger(ctx, logger)
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to setup instrumentation %s", err)
|
||||
}
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
mockServer := &server.Server{}
|
||||
|
||||
cleanFileToWatch := filepath.Clean(fileToWatch)
|
||||
watchDir := filepath.Dir(cleanFileToWatch)
|
||||
|
||||
watchedFiles := map[string]bool{cleanFileToWatch: true}
|
||||
watchDirs := map[string]bool{watchDir: true}
|
||||
|
||||
go watchChanges(ctx, watchDirs, watchedFiles, mockServer)
|
||||
|
||||
// escape backslash so regex doesn't fail on windows filepaths
|
||||
regexEscapedPathFile := strings.ReplaceAll(cleanFileToWatch, `\`, `\\\\*\\`)
|
||||
regexEscapedPathFile = path.Clean(regexEscapedPathFile)
|
||||
|
||||
regexEscapedPathDir := strings.ReplaceAll(watchDir, `\`, `\\\\*\\`)
|
||||
regexEscapedPathDir = path.Clean(regexEscapedPathDir)
|
||||
|
||||
begunWatchingDir := regexp.MustCompile(fmt.Sprintf(`DEBUG "Added directory %s to watcher."`, regexEscapedPathDir))
|
||||
_, err = testutils.WaitForString(ctx, begunWatchingDir, pr)
|
||||
if err != nil {
|
||||
t.Fatalf("timeout or error waiting for watcher to start: %s", err)
|
||||
}
|
||||
|
||||
err = os.WriteFile(fileToWatch, []byte("modification"), 0777)
|
||||
if err != nil {
|
||||
t.Fatalf("error writing to file: %v", err)
|
||||
}
|
||||
|
||||
detectedFileChange := regexp.MustCompile(fmt.Sprintf(`DEBUG "WRITE event detected in %s"`, regexEscapedPathFile))
|
||||
_, err = testutils.WaitForString(ctx, detectedFileChange, pr)
|
||||
if err != nil {
|
||||
t.Fatalf("timeout or error waiting for file to detect write: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrebuiltTools(t *testing.T) {
|
||||
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
||||
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.6.0
|
||||
0.8.0
|
||||
|
||||
@@ -12,4 +12,4 @@ description: >
|
||||
<link rel="canonical" href="getting-started/introduction/"/>
|
||||
<meta http-equiv="refresh" content="0;url=getting-started/introduction"/>
|
||||
</head>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
title: "About"
|
||||
type: docs
|
||||
weight: 6
|
||||
description: A list of other information related to Toolbox.
|
||||
description: >
|
||||
A list of other information related to Toolbox.
|
||||
---
|
||||
|
||||
@@ -9,22 +9,22 @@ description: Frequently asked questions about Toolbox.
|
||||
|
||||
MCP Toolbox for Databases is open-source and can be ran or deployed to a
|
||||
multitude of environments. For convenience, we release [compiled binaries and
|
||||
docker images][release-notes] (but you can always compile yourself as well!).
|
||||
docker images][release-notes] (but you can always compile yourself as well!).
|
||||
|
||||
For detailed instructions, check our these resources:
|
||||
|
||||
- [Quickstart: How to Run Locally](../getting-started/local_quickstart.md)
|
||||
- [Deploy to Cloud Run](../how-to/deploy_toolbox.md)
|
||||
|
||||
[release-notes]: https://github.com/googleapis/genai-toolbox/releases/
|
||||
|
||||
|
||||
## Do I need a Google Cloud account/project to get started with Toolbox?
|
||||
|
||||
Nope! While some of the sources Toolbox connects to may require GCP credentials,
|
||||
Toolbox doesn't require them and can connect to a bunch of different resources
|
||||
that don't.
|
||||
that don't.
|
||||
|
||||
## Does Toolbox take contributions from external users?
|
||||
## Does Toolbox take contributions from external users?
|
||||
|
||||
Absolutely! Please check out our [DEVELOPER.md][] for instructions on how to get
|
||||
started developing _on_ Toolbox instead of with it, and the [CONTRIBUTING.md][]
|
||||
@@ -33,17 +33,16 @@ for instructions on completing the CLA and getting a PR accepted.
|
||||
[DEVELOPER.md]: https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md
|
||||
[CONTRIBUTING.MD]: https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md
|
||||
|
||||
|
||||
## Can Toolbox support a feature to let me do _$FOO_?
|
||||
## Can Toolbox support a feature to let me do _$FOO_?
|
||||
|
||||
Maybe? The best place to start is by [opening an issue][github-issue] for
|
||||
discussion (or seeing if there is already one open), so we can better understand
|
||||
your use case and the best way to solve it. Generally we aim to prioritize the
|
||||
most popular issues, so make sure to +1 ones you are the most interested in.
|
||||
most popular issues, so make sure to +1 ones you are the most interested in.
|
||||
|
||||
[github-issue]: https://github.com/googleapis/genai-toolbox/issues
|
||||
|
||||
## Can Toolbox be used for non-database tools?
|
||||
## Can Toolbox be used for non-database tools?
|
||||
|
||||
Currently, Toolbox is primarily focused on making it easier to create and
|
||||
develop tools focused on interacting with Databases. We believe that there are a
|
||||
@@ -55,21 +54,21 @@ GRPC tools might be helpful in assisting with migrating to Toolbox or in
|
||||
accomplishing more complicated workflows. We're looking into what that might
|
||||
best look like in Toolbox.
|
||||
|
||||
## Can I use _$BAR_ orchestration framework to use tools from Toolbox?
|
||||
## Can I use _$BAR_ orchestration framework to use tools from Toolbox?
|
||||
|
||||
Currently, Toolbox only supports a limited number of client SDKs at our initial
|
||||
launch. We are investigating support for more frameworks as well as more general
|
||||
approaches for users without a framework -- look forward to seeing an update
|
||||
soon.
|
||||
|
||||
## Why does Toolbox use a server-client architecture pattern?
|
||||
## Why does Toolbox use a server-client architecture pattern?
|
||||
|
||||
Toolbox's server-client architecture allows us to more easily support a wide
|
||||
variety of languages and frameworks with a centralized implementation. It also
|
||||
allows us to tackle problems like connection pooling, auth, or caching more
|
||||
completely than entirely client-side solutions.
|
||||
|
||||
## Why was Toolbox written in Go?
|
||||
## Why was Toolbox written in Go?
|
||||
|
||||
While a large part of the Gen AI Ecosystem is predominately Python, we opted to
|
||||
use Go. We chose Go because it's still easy and simple to use, but also easier
|
||||
@@ -80,8 +79,9 @@ to be able to use Toolbox on the serving path of mission critical applications.
|
||||
It's easier to build the needed robustness, performance and scalability in Go
|
||||
than in Python.
|
||||
|
||||
## Is Toolbox compatible with Model Context Protocol (MCP)?
|
||||
|
||||
## Is Toolbox compatible with Model Context Protocol (MCP)?
|
||||
|
||||
Yes! Toolbox is compatible with [Anthropic's Model Context Protocol (MCP)](https://modelcontextprotocol.io/). Please checkout [Connect via MCP](../how-to/connect_via_mcp.md) on how to
|
||||
connect to Toolbox with an MCP client.
|
||||
Yes! Toolbox is compatible with [Anthropic's Model Context Protocol
|
||||
(MCP)](https://modelcontextprotocol.io/). Please checkout [Connect via
|
||||
MCP](../how-to/connect_via_mcp.md) on how to connect to Toolbox with an MCP
|
||||
client.
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
title: "Concepts"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: Some core concepts in Toolbox
|
||||
description: >
|
||||
Some core concepts in Toolbox
|
||||
---
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
title: "Telemetry"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: An overview of telemetry and observability in Toolbox.
|
||||
description: >
|
||||
An overview of telemetry and observability in Toolbox.
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -16,7 +17,6 @@ through [OpenTelemetry](https://opentelemetry.io/). Additional flags can be
|
||||
passed to Toolbox to enable different logging behavior, or to export metrics
|
||||
through a specific [exporter](#exporter).
|
||||
|
||||
|
||||
## Logging
|
||||
|
||||
The following flags can be used to customize Toolbox logging:
|
||||
@@ -26,7 +26,8 @@ The following flags can be used to customize Toolbox logging:
|
||||
| `--log-level` | Preferred log level, allowed values: `debug`, `info`, `warn`, `error`. Default: `info`. |
|
||||
| `--logging-format` | Preferred logging format, allowed values: `standard`, `json`. Default: `standard`. |
|
||||
|
||||
__Example:__
|
||||
**Example:**
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml" --log-level warn --logging-format json
|
||||
```
|
||||
@@ -34,6 +35,7 @@ __Example:__
|
||||
### Level
|
||||
|
||||
Toolbox supports the following log levels, including:
|
||||
|
||||
| **Log level** | **Description** |
|
||||
|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Debug | Debug logs typically contain information that is only useful during the debugging phase and may be of little value during production. |
|
||||
@@ -45,17 +47,18 @@ Toolbox will only output logs that are equal or more severe to the
|
||||
level that it is set. Below are the log levels that Toolbox supports in the
|
||||
order of severity.
|
||||
|
||||
|
||||
### Format
|
||||
|
||||
Toolbox supports both standard and structured logging format.
|
||||
|
||||
The standard logging outputs log as string:
|
||||
|
||||
```
|
||||
2024-11-12T15:08:11.451377-08:00 INFO "Initialized 0 sources.\n"
|
||||
```
|
||||
|
||||
The structured logging outputs log as JSON:
|
||||
|
||||
```
|
||||
{
|
||||
"timestamp":"2024-11-04T16:45:11.987299-08:00",
|
||||
@@ -65,9 +68,9 @@ The structured logging outputs log as JSON:
|
||||
}
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
{{< notice tip >}}
|
||||
`logging.googleapis.com/sourceLocation` shows the source code
|
||||
location information associated with the log entry, if any.
|
||||
location information associated with the log entry, if any.
|
||||
{{< /notice >}}
|
||||
|
||||
## Telemetry
|
||||
@@ -124,7 +127,6 @@ unified [resource][resource]. The list of resource attributes included are:
|
||||
| `service.name` | Open telemetry service name. Defaulted to `toolbox`. User can set the service name via flag mentioned above to distinguish between different toolbox service. |
|
||||
| `service.version` | The version of Toolbox used. |
|
||||
|
||||
|
||||
[resource]: https://opentelemetry.io/docs/languages/go/resources/
|
||||
|
||||
### Exporter
|
||||
@@ -150,9 +152,10 @@ Exporter][gcp-trace-exporter].
|
||||
[gcp-trace-exporter]:
|
||||
https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/tree/main/exporter/trace
|
||||
|
||||
{{< notice note >}}
|
||||
If you're using Google Cloud Monitoring, the following APIs will need to be
|
||||
{{< notice note >}}
|
||||
If you're using Google Cloud Monitoring, the following APIs will need to be
|
||||
enabled:
|
||||
|
||||
- [Cloud Logging API](https://cloud.google.com/logging/docs/api/enable-api)
|
||||
- [Cloud Monitoring API](https://cloud.google.com/monitoring/api/enable-api)
|
||||
- [Cloud Trace API](https://cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
|
||||
@@ -183,7 +186,7 @@ The following flags are used to determine Toolbox's telemetry configuration:
|
||||
| **flag** | **type** | **description** |
|
||||
|----------------------------|----------|----------------------------------------------------------------------------------------------------------------|
|
||||
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
|
||||
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "http://127.0.0.1:4318"). |
|
||||
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
|
||||
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
|
||||
|
||||
In addition to the flags noted above, you can also make additional configuration
|
||||
@@ -193,14 +196,16 @@ environmental variables.
|
||||
[sdk-configuration]:
|
||||
https://opentelemetry.io/docs/languages/sdk-configuration/general/
|
||||
|
||||
__Examples:__
|
||||
**Examples:**
|
||||
|
||||
To enable Google Cloud Exporter:
|
||||
|
||||
```bash
|
||||
./toolbox --telemetry-gcp
|
||||
```
|
||||
|
||||
To enable OTLP Exporter, provide Collector endpoint:
|
||||
|
||||
```bash
|
||||
./toolbox --telemetry-otlp="http://127.0.0.1:4553"
|
||||
```
|
||||
|
||||
@@ -3,5 +3,5 @@ title: "Getting Started"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to get started with Toolbox
|
||||
How to get started with Toolbox.
|
||||
---
|
||||
|
||||
@@ -222,7 +222,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.6.0\" # x-release-please-version\n",
|
||||
"version = \"0.8.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
@@ -474,165 +474,12 @@
|
||||
"source": [
|
||||
"> You can either use LangGraph or LlamaIndex to develop a Toolbox based\n",
|
||||
"> application. Run one of the sections below\n",
|
||||
"> - [Connect using Google GenAI](#scrollTo=Rwgv1LDdNKSn)\n",
|
||||
"> - [Connect using Google GenAI](#scrollTo=Fv2-uT4mvYtp)\n",
|
||||
"> - [Connect using ADK](#scrollTo=QqRlWqvYNKSo)\n",
|
||||
"> - [Connect Using LangGraph](#scrollTo=pbapNMhhL33S)\n",
|
||||
"> - [Connect using LlamaIndex](#scrollTo=04iysrm_L_7v)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "Rwgv1LDdNKSn"
|
||||
},
|
||||
"source": [
|
||||
"### Connect Using Google GenAI"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "HY23RMk4NKSn"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Install the Toolbox Core package\n",
|
||||
"!pip install toolbox-core --quiet\n",
|
||||
"\n",
|
||||
"# Install the Google GenAI package\n",
|
||||
"!pip install google-genai --quiet"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "9F1u566sNKSn"
|
||||
},
|
||||
"source": [
|
||||
"Create a Google GenAI Application which can Search, Book and Cancel hotels."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "LAuBIOXvNKSn"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import asyncio\n",
|
||||
"\n",
|
||||
"from google import genai\n",
|
||||
"from google.genai.types import (\n",
|
||||
" Content,\n",
|
||||
" FunctionDeclaration,\n",
|
||||
" GenerateContentConfig,\n",
|
||||
" Part,\n",
|
||||
" Tool,\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"from toolbox_core import ToolboxClient\n",
|
||||
"\n",
|
||||
"prompt = \"\"\"\n",
|
||||
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
||||
" cancellations. When the user searches for a hotel, mention it's name, id,\n",
|
||||
" location and price tier. Always mention hotel id while performing any\n",
|
||||
" searches. This is very important for any operations. For any bookings or\n",
|
||||
" cancellations, please provide the appropriate confirmation. Be sure to\n",
|
||||
" update checkin or checkout dates if mentioned by the user.\n",
|
||||
" Don't ask for confirmations from the user.\n",
|
||||
"\"\"\"\n",
|
||||
"\n",
|
||||
"queries = [\n",
|
||||
" \"Find hotels in Basel with Basel in it's name.\",\n",
|
||||
" \"Please book the hotel Hilton Basel for me.\",\n",
|
||||
" \"This is too expensive. Please cancel it.\",\n",
|
||||
" \"Please book Hyatt Regency for me\",\n",
|
||||
" \"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.\",\n",
|
||||
"]\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"async def run_application():\n",
|
||||
" toolbox_client = ToolboxClient(\"http://127.0.0.1:5000\")\n",
|
||||
"\n",
|
||||
" # The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use\n",
|
||||
" # integration. While this example uses Google's genai client, these callables can be adapted for\n",
|
||||
" # various function-calling or agent frameworks. For easier integration with supported frameworks\n",
|
||||
" # (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the\n",
|
||||
" # provided wrapper packages, which handle framework-specific boilerplate.\n",
|
||||
" toolbox_tools = await toolbox_client.load_toolset(\"my-toolset\")\n",
|
||||
" genai_client = genai.Client(\n",
|
||||
" vertexai=True, project=project_id, location=\"us-central1\"\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
" genai_tools = [\n",
|
||||
" Tool(\n",
|
||||
" function_declarations=[\n",
|
||||
" FunctionDeclaration.from_callable_with_api_option(callable=tool)\n",
|
||||
" ]\n",
|
||||
" )\n",
|
||||
" for tool in toolbox_tools\n",
|
||||
" ]\n",
|
||||
" history = []\n",
|
||||
" for query in queries:\n",
|
||||
" user_prompt_content = Content(\n",
|
||||
" role=\"user\",\n",
|
||||
" parts=[Part.from_text(text=query)],\n",
|
||||
" )\n",
|
||||
" history.append(user_prompt_content)\n",
|
||||
"\n",
|
||||
" response = genai_client.models.generate_content(\n",
|
||||
" model=\"gemini-2.0-flash\",\n",
|
||||
" contents=history,\n",
|
||||
" config=GenerateContentConfig(\n",
|
||||
" system_instruction=prompt,\n",
|
||||
" tools=genai_tools,\n",
|
||||
" ),\n",
|
||||
" )\n",
|
||||
" history.append(response.candidates[0].content)\n",
|
||||
" function_response_parts = []\n",
|
||||
" for function_call in response.function_calls:\n",
|
||||
" fn_name = function_call.name\n",
|
||||
" # The tools are sorted alphabetically\n",
|
||||
" if fn_name == \"search-hotels-by-name\":\n",
|
||||
" function_result = await toolbox_tools[3](**function_call.args)\n",
|
||||
" elif fn_name == \"search-hotels-by-location\":\n",
|
||||
" function_result = await toolbox_tools[2](**function_call.args)\n",
|
||||
" elif fn_name == \"book-hotel\":\n",
|
||||
" function_result = await toolbox_tools[0](**function_call.args)\n",
|
||||
" elif fn_name == \"update-hotel\":\n",
|
||||
" function_result = await toolbox_tools[4](**function_call.args)\n",
|
||||
" elif fn_name == \"cancel-hotel\":\n",
|
||||
" function_result = await toolbox_tools[1](**function_call.args)\n",
|
||||
" else:\n",
|
||||
" raise ValueError(\"Function name not present.\")\n",
|
||||
" function_response = {\"result\": function_result}\n",
|
||||
" function_response_part = Part.from_function_response(\n",
|
||||
" name=function_call.name,\n",
|
||||
" response=function_response,\n",
|
||||
" )\n",
|
||||
" function_response_parts.append(function_response_part)\n",
|
||||
"\n",
|
||||
" if function_response_parts:\n",
|
||||
" tool_response_content = Content(role=\"tool\", parts=function_response_parts)\n",
|
||||
" history.append(tool_response_content)\n",
|
||||
"\n",
|
||||
" response2 = genai_client.models.generate_content(\n",
|
||||
" model=\"gemini-2.0-flash-001\",\n",
|
||||
" contents=history,\n",
|
||||
" config=GenerateContentConfig(\n",
|
||||
" tools=genai_tools,\n",
|
||||
" ),\n",
|
||||
" )\n",
|
||||
" final_model_response_content = response2.candidates[0].content\n",
|
||||
" history.append(final_model_response_content)\n",
|
||||
" print(response2.text)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"asyncio.run(run_application())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
@@ -686,7 +533,7 @@
|
||||
"\"\"\"\n",
|
||||
"\n",
|
||||
"root_agent = Agent(\n",
|
||||
" model='gemini-2.0-flash',\n",
|
||||
" model='gemini-2.0-flash-001',\n",
|
||||
" name='hotel_agent',\n",
|
||||
" description='A helpful AI assistant.',\n",
|
||||
" instruction=prompt,\n",
|
||||
@@ -695,7 +542,7 @@
|
||||
"\n",
|
||||
"session_service = InMemorySessionService()\n",
|
||||
"artifacts_service = InMemoryArtifactService()\n",
|
||||
"session = session_service.create_session(\n",
|
||||
"session = await session_service.create_session(\n",
|
||||
" state={}, app_name='hotel_agent', user_id='123'\n",
|
||||
")\n",
|
||||
"runner = Runner(\n",
|
||||
@@ -802,8 +649,8 @@
|
||||
"async def run_application():\n",
|
||||
" # Create an LLM to bind with the agent.\n",
|
||||
" # TODO(developer): replace this with another model if needed\n",
|
||||
" model = ChatVertexAI(model_name=\"gemini-1.5-pro\", project=project_id)\n",
|
||||
" # model = ChatGoogleGenerativeAI(model=\"gemini-1.5-pro\")\n",
|
||||
" model = ChatVertexAI(model_name=\"gemini-2.0-flash-001\", project=project_id)\n",
|
||||
" # model = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash-001\")\n",
|
||||
" # model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
|
||||
"\n",
|
||||
" # Load the tools from the Toolbox server\n",
|
||||
@@ -898,12 +745,12 @@
|
||||
" # Create an LLM to bind with the agent.\n",
|
||||
" # TODO(developer): replace this with another model if needed\n",
|
||||
" llm = GoogleGenAI(\n",
|
||||
" model=\"gemini-1.5-pro\",\n",
|
||||
" model=\"gemini-2.0-flash-001\",\n",
|
||||
" vertexai_config={\"project\": project_id, \"location\": \"us-central1\"},\n",
|
||||
" )\n",
|
||||
" # llm = GoogleGenAI(\n",
|
||||
" # api_key=os.getenv(\"GOOGLE_API_KEY\"),\n",
|
||||
" # model=\"gemini-1.5-pro\",\n",
|
||||
" # model=\"gemini-2.0-flash-001\",\n",
|
||||
" # )\n",
|
||||
" # llm = Anthropic(\n",
|
||||
" # model=\"claude-3-7-sonnet-latest\",\n",
|
||||
@@ -931,6 +778,159 @@
|
||||
"await run_application()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "Fv2-uT4mvYtp"
|
||||
},
|
||||
"source": [
|
||||
"### Connect Using Google GenAI"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "mHSvk5_AvYtu"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Install the Toolbox Core package\n",
|
||||
"!pip install toolbox-core --quiet\n",
|
||||
"\n",
|
||||
"# Install the Google GenAI package\n",
|
||||
"!pip install google-genai --quiet"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "sO_7FGSYvYtu"
|
||||
},
|
||||
"source": [
|
||||
"Create a Google GenAI Application which can Search, Book and Cancel hotels."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"id": "-NVVBiLnvYtu"
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import asyncio\n",
|
||||
"\n",
|
||||
"from google import genai\n",
|
||||
"from google.genai.types import (\n",
|
||||
" Content,\n",
|
||||
" FunctionDeclaration,\n",
|
||||
" GenerateContentConfig,\n",
|
||||
" Part,\n",
|
||||
" Tool,\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"from toolbox_core import ToolboxClient\n",
|
||||
"\n",
|
||||
"prompt = \"\"\"\n",
|
||||
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
||||
" cancellations. When the user searches for a hotel, mention it's name, id,\n",
|
||||
" location and price tier. Always mention hotel id while performing any\n",
|
||||
" searches. This is very important for any operations. For any bookings or\n",
|
||||
" cancellations, please provide the appropriate confirmation. Be sure to\n",
|
||||
" update checkin or checkout dates if mentioned by the user.\n",
|
||||
" Don't ask for confirmations from the user.\n",
|
||||
"\"\"\"\n",
|
||||
"\n",
|
||||
"queries = [\n",
|
||||
" \"Find hotels in Basel with Basel in it's name.\",\n",
|
||||
" \"Please book the hotel Hilton Basel for me.\",\n",
|
||||
" \"This is too expensive. Please cancel it.\",\n",
|
||||
" \"Please book Hyatt Regency for me\",\n",
|
||||
" \"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.\",\n",
|
||||
"]\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"async def run_application():\n",
|
||||
" toolbox_client = ToolboxClient(\"http://127.0.0.1:5000\")\n",
|
||||
"\n",
|
||||
" # The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use\n",
|
||||
" # integration. While this example uses Google's genai client, these callables can be adapted for\n",
|
||||
" # various function-calling or agent frameworks. For easier integration with supported frameworks\n",
|
||||
" # (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the\n",
|
||||
" # provided wrapper packages, which handle framework-specific boilerplate.\n",
|
||||
" toolbox_tools = await toolbox_client.load_toolset(\"my-toolset\")\n",
|
||||
" genai_client = genai.Client(\n",
|
||||
" vertexai=True, project=project_id, location=\"us-central1\"\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
" genai_tools = [\n",
|
||||
" Tool(\n",
|
||||
" function_declarations=[\n",
|
||||
" FunctionDeclaration.from_callable_with_api_option(callable=tool)\n",
|
||||
" ]\n",
|
||||
" )\n",
|
||||
" for tool in toolbox_tools\n",
|
||||
" ]\n",
|
||||
" history = []\n",
|
||||
" for query in queries:\n",
|
||||
" user_prompt_content = Content(\n",
|
||||
" role=\"user\",\n",
|
||||
" parts=[Part.from_text(text=query)],\n",
|
||||
" )\n",
|
||||
" history.append(user_prompt_content)\n",
|
||||
"\n",
|
||||
" response = genai_client.models.generate_content(\n",
|
||||
" model=\"gemini-2.0-flash-001\",\n",
|
||||
" contents=history,\n",
|
||||
" config=GenerateContentConfig(\n",
|
||||
" system_instruction=prompt,\n",
|
||||
" tools=genai_tools,\n",
|
||||
" ),\n",
|
||||
" )\n",
|
||||
" history.append(response.candidates[0].content)\n",
|
||||
" function_response_parts = []\n",
|
||||
" for function_call in response.function_calls:\n",
|
||||
" fn_name = function_call.name\n",
|
||||
" # The tools are sorted alphabetically\n",
|
||||
" if fn_name == \"search-hotels-by-name\":\n",
|
||||
" function_result = await toolbox_tools[3](**function_call.args)\n",
|
||||
" elif fn_name == \"search-hotels-by-location\":\n",
|
||||
" function_result = await toolbox_tools[2](**function_call.args)\n",
|
||||
" elif fn_name == \"book-hotel\":\n",
|
||||
" function_result = await toolbox_tools[0](**function_call.args)\n",
|
||||
" elif fn_name == \"update-hotel\":\n",
|
||||
" function_result = await toolbox_tools[4](**function_call.args)\n",
|
||||
" elif fn_name == \"cancel-hotel\":\n",
|
||||
" function_result = await toolbox_tools[1](**function_call.args)\n",
|
||||
" else:\n",
|
||||
" raise ValueError(\"Function name not present.\")\n",
|
||||
" function_response = {\"result\": function_result}\n",
|
||||
" function_response_part = Part.from_function_response(\n",
|
||||
" name=function_call.name,\n",
|
||||
" response=function_response,\n",
|
||||
" )\n",
|
||||
" function_response_parts.append(function_response_part)\n",
|
||||
"\n",
|
||||
" if function_response_parts:\n",
|
||||
" tool_response_content = Content(role=\"tool\", parts=function_response_parts)\n",
|
||||
" history.append(tool_response_content)\n",
|
||||
"\n",
|
||||
" response2 = genai_client.models.generate_content(\n",
|
||||
" model=\"gemini-2.0-flash-001\",\n",
|
||||
" contents=history,\n",
|
||||
" config=GenerateContentConfig(\n",
|
||||
" tools=genai_tools,\n",
|
||||
" ),\n",
|
||||
" )\n",
|
||||
" final_model_response_content = response2.candidates[0].content\n",
|
||||
" history.append(final_model_response_content)\n",
|
||||
" print(response2.text)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"asyncio.run(run_application())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
title: "Configuration"
|
||||
type: docs
|
||||
weight: 4
|
||||
description: How to configure Toolbox's tools.yaml file.
|
||||
description: >
|
||||
How to configure Toolbox's tools.yaml file.
|
||||
---
|
||||
|
||||
The primary way to configure Toolbox is through the `tools.yaml` file. If you
|
||||
|
||||
@@ -2,24 +2,25 @@
|
||||
title: "Introduction"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: An introduction to MCP Toolbox for Databases.
|
||||
description: >
|
||||
An introduction to MCP Toolbox for Databases.
|
||||
---
|
||||
|
||||
MCP Toolbox for Databases is an open source MCP server for databases. It enables
|
||||
you to develop tools easier, faster, and more securely by handling the complexities
|
||||
such as connection pooling, authentication, and more.
|
||||
|
||||
|
||||
{{< notice note >}}
|
||||
{{< notice note >}}
|
||||
This solution was originally named “Gen AI Toolbox for
|
||||
Databases” as its initial development predated MCP, but was renamed to align
|
||||
with recently added MCP compatibility.
|
||||
with recently added MCP compatibility.
|
||||
{{< /notice >}}
|
||||
|
||||
## Why Toolbox?
|
||||
## Why Toolbox?
|
||||
|
||||
Toolbox helps you build Gen AI tools that let your agents access data in your
|
||||
database. Toolbox provides:
|
||||
|
||||
- **Simplified development**: Integrate tools to your agent in less than 10
|
||||
lines of code, reuse tools between multiple agents or frameworks, and deploy
|
||||
new versions of tools more easily.
|
||||
@@ -29,6 +30,33 @@ database. Toolbox provides:
|
||||
- **End-to-end observability**: Out of the box metrics and tracing with built-in
|
||||
support for OpenTelemetry.
|
||||
|
||||
**⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
|
||||
|
||||
Stop context-switching and let your AI assistant become a true co-developer. By
|
||||
[connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
|
||||
delegate complex and time-consuming database tasks, allowing you to build faster
|
||||
and focus on what matters. This isn't just about code completion; it's about
|
||||
giving your AI the context it needs to handle the entire development lifecycle.
|
||||
|
||||
Here’s how it will save you time:
|
||||
|
||||
- **Query in Plain English**: Interact with your data using natural language
|
||||
right from your IDE. Ask complex questions like, *"How many orders were
|
||||
delivered in 2024, and what items were in them?"* without writing any SQL.
|
||||
- **Automate Database Management**: Simply describe your data needs, and let the
|
||||
AI assistant manage your database for you. It can handle generating queries,
|
||||
creating tables, adding indexes, and more.
|
||||
- **Generate Context-Aware Code**: Empower your AI assistant to generate
|
||||
application code and tests with a deep understanding of your real-time
|
||||
database schema. This accelerates the development cycle by ensuring the
|
||||
generated code is directly usable.
|
||||
- **Slash Development Overhead**: Radically reduce the time spent on manual
|
||||
setup and boilerplate. MCP Toolbox helps streamline lengthy database
|
||||
configurations, repetitive code, and error-prone schema migrations.
|
||||
|
||||
Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
|
||||
|
||||
[connect-ide]: ../../how-to/connect-ide/
|
||||
|
||||
## General Architecture
|
||||
|
||||
@@ -44,6 +72,7 @@ redeploying your application.
|
||||
## Getting Started
|
||||
|
||||
### Installing the server
|
||||
|
||||
For the latest version, check the [releases page][releases] and use the
|
||||
following instructions for your OS and CPU architecture.
|
||||
|
||||
@@ -57,7 +86,7 @@ To install Toolbox as a binary:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.6.0
|
||||
export VERSION=0.8.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -68,7 +97,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.6.0
|
||||
export VERSION=0.8.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -79,7 +108,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.6.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
@@ -94,6 +123,9 @@ execute `toolbox` to start the server:
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
@@ -106,6 +138,7 @@ out the resources in the [How-to section](../../how-to/_index.md)
|
||||
Once your server is up and running, you can load the tools into your
|
||||
application. See below the list of Client SDKs for using various frameworks:
|
||||
|
||||
#### Python
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
@@ -117,9 +150,10 @@ tools:
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = await client.load_toolset("toolset_name")
|
||||
{{< /highlight >}}
|
||||
|
||||
@@ -137,9 +171,10 @@ tools:
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = client.load_toolset()
|
||||
{{< /highlight >}}
|
||||
|
||||
@@ -157,9 +192,11 @@ tools:
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application
|
||||
|
||||
tools = client.load_toolset()
|
||||
{{< /highlight >}}
|
||||
|
||||
@@ -168,3 +205,115 @@ For more detailed instructions on using the Toolbox Llamaindex SDK, see the
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
#### Javascript/Typescript
|
||||
|
||||
Once you've installed the [Toolbox Core
|
||||
SDK](https://www.npmjs.com/package/@toolbox-sdk/core), you can load
|
||||
tools:
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
{{< highlight javascript >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LangChain/Langraph" lang="en" %}}
|
||||
|
||||
{{< highlight javascript >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(currTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
|
||||
// Use these tools in your Langchain/Langraph applications
|
||||
const tools = toolboxTools.map(getTool);
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Genkit" lang="en" %}}
|
||||
|
||||
{{< highlight javascript >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
import { genkit } from 'genkit';
|
||||
|
||||
// Initialise genkit
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => ai.defineTool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
}, toolboxTool)
|
||||
|
||||
// Use these tools in your Genkit applications
|
||||
const tools = toolboxTools.map(getTool);
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LlamaIndex" lang="en" %}}
|
||||
|
||||
{{< highlight javascript >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
import { tool } from "llamaindex";
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParams(),
|
||||
execute: toolboxTool
|
||||
});;
|
||||
|
||||
// Use these tools in your LlamaIndex applications
|
||||
const tools = toolboxTools.map(getTool);
|
||||
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
@@ -3,9 +3,8 @@ title: "Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox locally with Python, PostgreSQL, and
|
||||
[GoogleGenAI](https://pypi.org/project/google-genai/),
|
||||
[LangGraph](https://www.langchain.com/langgraph), [LlamaIndex](https://www.llamaindex.ai/) or [Agent Development Kit](https://google.github.io/adk-docs/).
|
||||
How to get started running Toolbox locally with Python, PostgreSQL, and [Agent Development Kit](https://google.github.io/adk-docs/),
|
||||
[LangGraph](https://www.langchain.com/langgraph), [LlamaIndex](https://www.llamaindex.ai/) or [GoogleGenAI](https://pypi.org/project/google-genai/).
|
||||
---
|
||||
|
||||
[](https://colab.research.google.com/github/googleapis/genai-toolbox/blob/main/docs/en/getting-started/colab_quickstart.ipynb)
|
||||
@@ -37,24 +36,40 @@ accessed by our agent, and create a database user for Toolbox to connect with.
|
||||
Here, `postgres` denotes the default postgres superuser.
|
||||
|
||||
{{< notice info >}}
|
||||
|
||||
#### **Having trouble connecting?**
|
||||
|
||||
* **Password Prompt:** If you are prompted for a password for the `postgres` user and do not know it (or a blank password doesn't work), your PostgreSQL installation might require a password or a different authentication method.
|
||||
* **`FATAL: role "postgres" does not exist`:** This error means the default `postgres` superuser role isn't available under that name on your system.
|
||||
* **`Connection refused`:** Ensure your PostgreSQL server is actually running. You can typically check with `sudo systemctl status postgresql` and start it with `sudo systemctl start postgresql` on Linux systems.
|
||||
* **Password Prompt:** If you are prompted for a password for the `postgres`
|
||||
user and do not know it (or a blank password doesn't work), your PostgreSQL
|
||||
installation might require a password or a different authentication method.
|
||||
* **`FATAL: role "postgres" does not exist`:** This error means the default
|
||||
`postgres` superuser role isn't available under that name on your system.
|
||||
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
|
||||
You can typically check with `sudo systemctl status postgresql` and start it
|
||||
with `sudo systemctl start postgresql` on Linux systems.
|
||||
|
||||
<br/>
|
||||
|
||||
#### **Common Solution**
|
||||
|
||||
For password issues or if the `postgres` role seems inaccessible directly, try switching to the `postgres` operating system user first. This user often has permission to connect without a password for local connections (this is called peer authentication).
|
||||
For password issues or if the `postgres` role seems inaccessible directly, try
|
||||
switching to the `postgres` operating system user first. This user often has
|
||||
permission to connect without a password for local connections (this is called
|
||||
peer authentication).
|
||||
|
||||
```bash
|
||||
sudo -i -u postgres
|
||||
psql -h 127.0.0.1
|
||||
```
|
||||
Once you are in the `psql` shell using this method, you can proceed with the database creation steps below. Afterwards, type `\q` to exit `psql`, and then `exit` to return to your normal user shell.
|
||||
|
||||
If desired, once connected to `psql` as the `postgres` OS user, you can set a password for the `postgres` *database* user using: `ALTER USER postgres WITH PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U postgres` and a password next time.
|
||||
Once you are in the `psql` shell using this method, you can proceed with the
|
||||
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
|
||||
`exit` to return to your normal user shell.
|
||||
|
||||
If desired, once connected to `psql` as the `postgres` OS user, you can set a
|
||||
password for the `postgres` *database* user using: `ALTER USER postgres WITH
|
||||
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
|
||||
postgres` and a password next time.
|
||||
{{< /notice >}}
|
||||
|
||||
1. Create a new database and a new user:
|
||||
@@ -78,7 +93,10 @@ If desired, once connected to `psql` as the `postgres` OS user, you can set a pa
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
(If you used `sudo -i -u postgres` and then `psql`, remember you might also need to type `exit` after `\q` to leave the `postgres` user's shell session.)
|
||||
|
||||
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
|
||||
need to type `exit` after `\q` to leave the `postgres` user's shell
|
||||
session.)
|
||||
|
||||
1. Connect to your database with your new user:
|
||||
|
||||
@@ -138,7 +156,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.6.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -239,6 +257,9 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
@@ -253,10 +274,6 @@ you can connect to a
|
||||
1. In a new terminal, install the SDK package.
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="Core" lang="bash" >}}
|
||||
|
||||
pip install toolbox-core
|
||||
{{< /tab >}}
|
||||
{{< tab header="ADK" lang="bash" >}}
|
||||
|
||||
pip install toolbox-core
|
||||
@@ -269,15 +286,15 @@ pip install toolbox-langchain
|
||||
|
||||
pip install toolbox-llamaindex
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="bash" >}}
|
||||
|
||||
pip install toolbox-core
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Install other required dependencies:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="Core" lang="bash" >}}
|
||||
|
||||
pip install google-genai
|
||||
{{< /tab >}}
|
||||
{{< tab header="ADK" lang="bash" >}}
|
||||
|
||||
pip install google-adk
|
||||
@@ -301,123 +318,16 @@ pip install llama-index-llms-google-genai
|
||||
|
||||
# pip install llama-index-llms-anthropic
|
||||
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="bash" >}}
|
||||
|
||||
pip install google-genai
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Create a new file named `hotel_agent.py` and copy the following
|
||||
code to create an agent:
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="Core" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
from google import genai
|
||||
from google.genai.types import (
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
Part,
|
||||
Tool,
|
||||
)
|
||||
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Please book the hotel Hilton Basel for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Hyatt Regency for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:
|
||||
|
||||
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
|
||||
# integration. While this example uses Google's genai client, these callables can be adapted for
|
||||
# various function-calling or agent frameworks. For easier integration with supported frameworks
|
||||
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
|
||||
# provided wrapper packages, which handle framework-specific boilerplate.
|
||||
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
|
||||
genai_client = genai.Client(
|
||||
vertexai=True, project="project-id", location="us-central1"
|
||||
)
|
||||
|
||||
genai_tools = [
|
||||
Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration.from_callable_with_api_option(callable=tool)
|
||||
]
|
||||
)
|
||||
for tool in toolbox_tools
|
||||
]
|
||||
history = []
|
||||
for query in queries:
|
||||
user_prompt_content = Content(
|
||||
role="user",
|
||||
parts=[Part.from_text(text=query)],
|
||||
)
|
||||
history.append(user_prompt_content)
|
||||
|
||||
response = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
system_instruction=prompt,
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
history.append(response.candidates[0].content)
|
||||
function_response_parts = []
|
||||
for function_call in response.function_calls:
|
||||
fn_name = function_call.name
|
||||
# The tools are sorted alphabetically
|
||||
if fn_name == "search-hotels-by-name":
|
||||
function_result = await toolbox_tools[3](**function_call.args)
|
||||
elif fn_name == "search-hotels-by-location":
|
||||
function_result = await toolbox_tools[2](**function_call.args)
|
||||
elif fn_name == "book-hotel":
|
||||
function_result = await toolbox_tools[0](**function_call.args)
|
||||
elif fn_name == "update-hotel":
|
||||
function_result = await toolbox_tools[4](**function_call.args)
|
||||
elif fn_name == "cancel-hotel":
|
||||
function_result = await toolbox_tools[1](**function_call.args)
|
||||
else:
|
||||
raise ValueError("Function name not present.")
|
||||
function_response = {"result": function_result}
|
||||
function_response_part = Part.from_function_response(
|
||||
name=function_call.name,
|
||||
response=function_response,
|
||||
)
|
||||
function_response_parts.append(function_response_part)
|
||||
|
||||
if function_response_parts:
|
||||
tool_response_content = Content(role="tool", parts=function_response_parts)
|
||||
history.append(tool_response_content)
|
||||
|
||||
response2 = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
final_model_response_content = response2.candidates[0].content
|
||||
history.append(final_model_response_content)
|
||||
print(response2.text)
|
||||
|
||||
asyncio.run(run_application())
|
||||
|
||||
{{< /tab >}}
|
||||
{{< tab header="ADK" lang="python" >}}
|
||||
from google.adk.agents import Agent
|
||||
from google.adk.runners import Runner
|
||||
@@ -426,65 +336,69 @@ from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactServ
|
||||
from google.genai import types
|
||||
from toolbox_core import ToolboxSyncClient
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
# TODO(developer): replace this with your Google API key
|
||||
|
||||
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
|
||||
|
||||
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
|
||||
async def main():
|
||||
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
root_agent = Agent(
|
||||
model='gemini-2.0-flash',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
root_agent = Agent(
|
||||
model='gemini-2.0-flash-001',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = await session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
|
||||
for text in responses:
|
||||
print(text)
|
||||
for text in responses:
|
||||
print(text)
|
||||
|
||||
asyncio.run(main())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="python" >}}
|
||||
import asyncio
|
||||
@@ -522,8 +436,8 @@ queries = [
|
||||
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
model = ChatVertexAI(model_name="gemini-1.5-pro")
|
||||
# model = ChatGoogleGenerativeAI(model="gemini-1.5-pro")
|
||||
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
|
||||
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
|
||||
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
@@ -576,12 +490,12 @@ queries = [
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
llm = GoogleGenAI(
|
||||
model="gemini-1.5-pro",
|
||||
model="gemini-2.0-flash-001",
|
||||
vertexai_config={"project": "project-id", "location": "us-central1"},
|
||||
)
|
||||
# llm = GoogleGenAI(
|
||||
# api_key=os.getenv("GOOGLE_API_KEY"),
|
||||
# model="gemini-1.5-pro",
|
||||
# model="gemini-2.0-flash-001",
|
||||
# )
|
||||
# llm = Anthropic(
|
||||
# model="claude-3-7-sonnet-latest",
|
||||
@@ -604,23 +518,138 @@ async def run_application():
|
||||
print(str(response))
|
||||
|
||||
asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
from google import genai
|
||||
from google.genai.types import (
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
Part,
|
||||
Tool,
|
||||
)
|
||||
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Please book the hotel Hilton Basel for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Hyatt Regency for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
|
||||
# integration. While this example uses Google's genai client, these callables can be adapted for
|
||||
# various function-calling or agent frameworks. For easier integration with supported frameworks
|
||||
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
|
||||
# provided wrapper packages, which handle framework-specific boilerplate.
|
||||
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
|
||||
genai_client = genai.Client(
|
||||
vertexai=True, project="project-id", location="us-central1"
|
||||
)
|
||||
|
||||
genai_tools = [
|
||||
Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration.from_callable_with_api_option(callable=tool)
|
||||
]
|
||||
)
|
||||
for tool in toolbox_tools
|
||||
]
|
||||
history = []
|
||||
for query in queries:
|
||||
user_prompt_content = Content(
|
||||
role="user",
|
||||
parts=[Part.from_text(text=query)],
|
||||
)
|
||||
history.append(user_prompt_content)
|
||||
|
||||
response = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
system_instruction=prompt,
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
history.append(response.candidates[0].content)
|
||||
function_response_parts = []
|
||||
for function_call in response.function_calls:
|
||||
fn_name = function_call.name
|
||||
# The tools are sorted alphabetically
|
||||
if fn_name == "search-hotels-by-name":
|
||||
function_result = await toolbox_tools[3](**function_call.args)
|
||||
elif fn_name == "search-hotels-by-location":
|
||||
function_result = await toolbox_tools[2](**function_call.args)
|
||||
elif fn_name == "book-hotel":
|
||||
function_result = await toolbox_tools[0](**function_call.args)
|
||||
elif fn_name == "update-hotel":
|
||||
function_result = await toolbox_tools[4](**function_call.args)
|
||||
elif fn_name == "cancel-hotel":
|
||||
function_result = await toolbox_tools[1](**function_call.args)
|
||||
else:
|
||||
raise ValueError("Function name not present.")
|
||||
function_response = {"result": function_result}
|
||||
function_response_part = Part.from_function_response(
|
||||
name=function_call.name,
|
||||
response=function_response,
|
||||
)
|
||||
function_response_parts.append(function_response_part)
|
||||
|
||||
if function_response_parts:
|
||||
tool_response_content = Content(role="tool", parts=function_response_parts)
|
||||
history.append(tool_response_content)
|
||||
|
||||
response2 = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
final_model_response_content = response2.candidates[0].content
|
||||
history.append(final_model_response_content)
|
||||
print(response2.text)
|
||||
|
||||
asyncio.run(run_application())
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
To learn more about tool calling with Google GenAI, check out the
|
||||
[Google GenAI Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#manually-declare-and-invoke-a-function-for-function-calling).
|
||||
{{% /tab %}}
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="ADK" lang="en" %}}
|
||||
To learn more about Agent Development Kit, check out the [ADK documentation.](https://google.github.io/adk-docs/)
|
||||
To learn more about Agent Development Kit, check out the [ADK
|
||||
documentation.](https://google.github.io/adk-docs/)
|
||||
{{% /tab %}}
|
||||
{{% tab header="Langchain" lang="en" %}}
|
||||
To learn more about Agents in LangChain, check out the [LangGraph Agent documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
|
||||
To learn more about Agents in LangChain, check out the [LangGraph Agent
|
||||
documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
|
||||
{{% /tab %}}
|
||||
{{% tab header="LlamaIndex" lang="en" %}}
|
||||
To learn more about Agents in LlamaIndex, check out the
|
||||
[LlamaIndex AgentWorkflow documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
|
||||
To learn more about Agents in LlamaIndex, check out the [LlamaIndex
|
||||
AgentWorkflow
|
||||
documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
|
||||
{{% /tab %}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
To learn more about tool calling with Google GenAI, check out the
|
||||
[Google GenAI
|
||||
Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#manually-declare-and-invoke-a-function-for-function-calling).
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.6.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -199,7 +199,8 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
For more info on tools, check out the [Tools](../../resources/tools/_index.md) section.
|
||||
For more info on tools, check out the
|
||||
[Tools](../../resources/tools/_index.md) section.
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
|
||||
@@ -5,351 +5,9 @@ weight: 2
|
||||
description: >
|
||||
Connect your IDE to AlloyDB using Toolbox.
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like AlloyDB. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a AlloyDB for Postgres instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code ][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
|
||||
## Before you begin
|
||||
|
||||
1. In the Google Cloud console, on the [project selector page](https://console.cloud.google.com/projectselector2/home/dashboard), select or create a Google Cloud project.
|
||||
|
||||
1. [Make sure that billing is enabled for your Google Cloud project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
|
||||
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Enable the AlloyDB, Compute Engine, Cloud Resource Manager, and Service Networking APIs in the Google Cloud project](https://console.cloud.google.com/flows/enableapi?apiid=alloydb.googleapis.com,compute.googleapis.com,cloudresourcemanager.googleapis.com,servicenetworking.googleapis.com).
|
||||
|
||||
1. [Create a cluster and its primary instance](https://cloud.google.com/alloydb/docs/quickstart/create-and-connect). These instructions assume that your AlloyDB instance has a [public IP address](https://cloud.google.com/alloydb/docs/connect-public-ip). By default, AlloyDB assigns a private IP address to a new instance. Toolbox will connect securely using the [AlloyDB Language Connectors](https://cloud.google.com/alloydb/docs/language-connectors-overview).
|
||||
|
||||
1. Configure the required roles and permissions to complete this task. You will need [Cloud AlloyDB Client](https://cloud.google.com/alloydb/docs/auth-proxy/connect#required-iam-permissions) (`roles/alloydb.client`) and Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`) roles or equivalent IAM permissions to connect to the instance.
|
||||
|
||||
1. Configured [Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment) for your environment.
|
||||
|
||||
1. Create or reuse [a database user](https://cloud.google.com/alloydb/docs/database-users/manage-roles) and have the username and password ready.
|
||||
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.5.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/windows/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure and run Toolbox
|
||||
|
||||
This section will create a `tools.yaml` file, which will define which tools your AI Agent will have access to. You can add, remove, or edit tools as needed to make sure you have the best tools for your workflows.
|
||||
|
||||
This will configure the following tools:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
3. **execute_sql**: execute any SQL statement
|
||||
|
||||
To configure Toolbox, run the following steps:
|
||||
|
||||
1. Set the following environment variables:
|
||||
|
||||
```bash
|
||||
# The ID of your Google Cloud Project where the AlloyDB cluster/instance is located.
|
||||
export ALLOYDB_PROJECT="your-gcp-project-id"
|
||||
|
||||
# The region where your AlloyDB cluster is located (e.g., us-central1).
|
||||
export ALLOYDB_REGION="your-cluster-region"
|
||||
|
||||
# The name of your AlloyDB cluster.
|
||||
export ALLOYDB_CLUSTER="your-cluster-name"
|
||||
|
||||
# The name of your AlloyDB instance.
|
||||
export ALLOYDB_INSTANCE="your-instance-name"
|
||||
|
||||
# The name of the database you want to connect to within the instance.
|
||||
export ALLOYDB_DB="your-database-name"
|
||||
|
||||
# The username for connecting to the database.
|
||||
export ALLOYDB_USER="your-database-user"
|
||||
|
||||
# The password for the specified database user.
|
||||
export ALLOYDB_PASS="your-database-password"
|
||||
```
|
||||
|
||||
2. Create a `tools.yaml` file.
|
||||
|
||||
3. Copy and paste the following contents into the `tools.yaml`:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
alloydb-pg-source:
|
||||
kind: alloydb-postgres
|
||||
project: ${ALLOYDB_PROJECT}
|
||||
region: ${ALLOYDB_REGION}
|
||||
cluster: ${ALLOYDB_CLUSTER}
|
||||
instance: ${ALLOYDB_INSTANCE}
|
||||
database: ${ALLOYDB_DB}
|
||||
user: ${ALLOYDB_USER}
|
||||
password: ${ALLOYDB_PASS}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: alloydb-pg-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
|
||||
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
|
||||
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
|
||||
),
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
|
||||
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
|
||||
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
|
||||
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
|
||||
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
|
||||
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
|
||||
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
|
||||
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
```
|
||||
|
||||
4. Start Toolbox to listen on `127.0.0.1:5000`:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file tools.yaml --address 127.0.0.1 --port 5000
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To stop the Toolbox server when you're finished, press `ctrl+c` to send the terminate signal.
|
||||
{{< /notice >}}
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
2. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
3. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Install [`npx`](https://docs.npmjs.com/cli/v8/commands/npx).
|
||||
2. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
3. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
4. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote",
|
||||
"http://127.0.0.1:5000/mcp/sse"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
5. Restart Claude desktop.
|
||||
6. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
2. Tap Configure MCP Servers to open the configuration file.
|
||||
3. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
2. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
2. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
2. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb": {
|
||||
"serverUrl": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
|
||||
13
docs/en/how-to/connect-ide/bigquery_mcp.md
Normal file
13
docs/en/how-to/connect-ide/bigquery_mcp.md
Normal file
@@ -0,0 +1,13 @@
|
||||
---
|
||||
title: "BigQuery using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to BigQuery using Toolbox.
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/bigquery/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/bigquery/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
13
docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md
Normal file
13
docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md
Normal file
@@ -0,0 +1,13 @@
|
||||
---
|
||||
title: "Cloud SQL for SQL Server using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Cloud SQL for SQL Server using Toolbox.
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/sql/docs/sqlserver/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/sql/docs/sqlserver/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
13
docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md
Normal file
13
docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md
Normal file
@@ -0,0 +1,13 @@
|
||||
---
|
||||
title: "Cloud SQL for MySQL using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Cloud SQL for MySQL using Toolbox.
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/sql/docs/mysql/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/sql/docs/mysql/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -1,353 +1,13 @@
|
||||
---
|
||||
title: "Cloud SQL using MCP"
|
||||
title: "Cloud SQL for Postgres using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Cloud SQl for Postgres using Toolbox.
|
||||
Connect your IDE to Cloud SQL for Postgres using Toolbox.
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Cloud SQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Cloud SQL for Postgres instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code ][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
|
||||
## Before you begin
|
||||
|
||||
1. In the Google Cloud console, on the [project selector page](https://console.cloud.google.com/projectselector2/home/dashboard), select or create a Google Cloud project.
|
||||
|
||||
1. [Make sure that billing is enabled for your Google Cloud project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
|
||||
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Enable the Cloud SQL Admin API in the Google Cloud project](https://console.cloud.google.com/flows/enableapi?apiid=sqladmin&redirect=https://console.cloud.google.com).
|
||||
|
||||
1. [Create a Cloud SQL for PostgreSQL instance](https://cloud.google.com/sql/docs/postgres/create-instance). These instructions assume that your Cloud SQL instance has a [public IP address](https://cloud.google.com/sql/docs/postgres/configure-ip). By default, Cloud SQL assigns a public IP address to a new instance. Toolbox will connect securely using the [Cloud SQL connectors](https://cloud.google.com/sql/docs/postgres/language-connectors).
|
||||
|
||||
1. Configure the required roles and permissions to complete this task. You will need [Cloud SQL > Client](https://cloud.google.com/sql/docs/postgres/roles-and-permissions#proxy-roles-permissions) role (`roles/cloudsql.client`) or equivalent IAM permissions to connect to the instance.
|
||||
|
||||
1. Configured [Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment) for your environment.
|
||||
|
||||
1. Create or reuse [a database user](https://cloud.google.com/sql/docs/postgres/create-manage-users) and have the username and password ready.
|
||||
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.5.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/windows/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure and run Toolbox
|
||||
|
||||
This section will create a `tools.yaml` file, which will define which tools your AI Agent will have access to. You can add, remove, or edit tools as needed to make sure you have the best tools for your workflows.
|
||||
|
||||
This will configure the following tools:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
2. **execute_sql**: execute any SQL statement
|
||||
|
||||
To configure Toolbox, run the following steps:
|
||||
|
||||
1. Set the following environment variables:
|
||||
|
||||
```bash
|
||||
# The ID of your Google Cloud Project where the Cloud SQL instance is located.
|
||||
export CLOUD_SQL_PROJECT="your-gcp-project-id"
|
||||
|
||||
# The region where your Cloud SQL instance is located (e.g., us-central1).
|
||||
export CLOUD_SQL_REGION="your-instance-region"
|
||||
|
||||
# The name of your Cloud SQL instance.
|
||||
export CLOUD_SQL_INSTANCE="your-instance-name"
|
||||
|
||||
# The name of the database you want to connect to within the instance.
|
||||
export CLOUD_SQL_DB="your-database-name"
|
||||
|
||||
# The username for connecting to the database.
|
||||
export CLOUD_SQL_USER="your-database-user"
|
||||
|
||||
# The password for the specified database user.
|
||||
export CLOUD_SQL_PASS="your-database-password"
|
||||
```
|
||||
|
||||
2. Create a `tools.yaml` file.
|
||||
|
||||
3. Copy and paste the following contents into the `tools.yaml`:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
cloudsql-pg-source:
|
||||
kind: cloud-sql-postgres
|
||||
project: ${CLOUD_SQL_PROJECT}
|
||||
region: ${CLOUD_SQL_REGION}
|
||||
instance: ${CLOUD_SQL_INSTANCE}
|
||||
database: ${CLOUD_SQL_DB}
|
||||
user: ${CLOUD_SQL_USER}
|
||||
password: ${CLOUD_SQL_PASS}
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: cloudsql-pg-source
|
||||
description: Use this tool to execute SQL
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: cloudsql-pg-source
|
||||
description: >
|
||||
Lists detailed table information (object type, columns, constraints, indexes, triggers, owner, comment)
|
||||
as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names.
|
||||
If names are omitted, lists all tables in user schemas
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
|
||||
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
|
||||
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
|
||||
),
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
|
||||
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
|
||||
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
|
||||
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
|
||||
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
|
||||
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
|
||||
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
|
||||
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
```
|
||||
|
||||
4. Start Toolbox to listen on `127.0.0.1:5000`:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file tools.yaml --address 127.0.0.1 --port 5000
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To stop the Toolbox server when you're finished, press `ctrl+c` to send the terminate signal.
|
||||
{{< /notice >}}
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
2. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
3. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Install [`npx`](https://docs.npmjs.com/cli/v8/commands/npx).
|
||||
2. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
3. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
4. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote",
|
||||
"http://127.0.0.1:5000/mcp/sse"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
5. Restart Claude desktop.
|
||||
6. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
2. Tap Configure MCP Servers to open the configuration file.
|
||||
3. Add the following configuration and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
2. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
2. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
2. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
3. Add the following configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"serverUrl": "http://127.0.0.1:5000/mcp/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/sql/docs/postgres/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/sql/docs/postgres/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
|
||||
@@ -6,11 +6,14 @@ description: >
|
||||
Connect your IDE to PostgreSQL using Toolbox.
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Postgres. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Postgres instance:
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
|
||||
an open protocol for connecting Large Language Models (LLMs) to data sources
|
||||
like Postgres. This guide covers how to use [MCP Toolbox for Databases][toolbox]
|
||||
to expose your developer assistant tools to a Postgres instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code ][vscode] (Copilot)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
@@ -24,7 +27,8 @@ description: >
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
|
||||
{{< notice tip >}}
|
||||
This guide can be used with [AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
This guide can be used with [AlloyDB
|
||||
Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
{{< /notice >}}
|
||||
|
||||
## Set up the database
|
||||
@@ -34,34 +38,37 @@ This guide can be used with [AlloyDB Omni](https://cloud.google.com/alloydb/omni
|
||||
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
|
||||
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
|
||||
|
||||
1. Create or reuse [a database user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users) and have the username and password ready.
|
||||
|
||||
1. Create or reuse [a database
|
||||
user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
|
||||
and have the username and password ready.
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.5.0+:
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
V0.6.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/linux/amd64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/linux/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/arm64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/arm64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/darwin/amd64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/windows/amd64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/windows/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
@@ -74,269 +81,198 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.5.0/windows/amd64/toolbo
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure and run Toolbox
|
||||
|
||||
This section will create a `tools.yaml` file, which will define which tools your AI Agent will have access to. You can add, remove, or edit tools as needed to make sure you have the best tools for your workflows.
|
||||
|
||||
This will configure the following tools:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
3. **execute_sql**: execute any SQL statement
|
||||
|
||||
To configure Toolbox, run the following steps:
|
||||
|
||||
1. Set the following environment variables:
|
||||
|
||||
```bash
|
||||
# The IP address of the Postgres instance.
|
||||
export POSTGRES_HOST="127.0.0.1"
|
||||
|
||||
# The port of the Postgres instance.
|
||||
export POSTGRES_PORT=5432
|
||||
|
||||
# The name of the database you want to connect to within the instance.
|
||||
export POSTGRES_DB="your-database-name"
|
||||
|
||||
# The username for connecting to the database.
|
||||
export POSTGRES_USER="your-database-user"
|
||||
|
||||
# The password for the specified database user.
|
||||
export POSTGRES_PASS="your-database-password"
|
||||
```
|
||||
|
||||
2. Create a `tools.yaml` file.
|
||||
|
||||
3. Copy and paste the following contents into the `tools.yaml`:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
postgresql-source:
|
||||
kind: postgres
|
||||
host: ${POSTGRES_HOST}
|
||||
port: ${POSTGRES_PORT}
|
||||
database: ${POSTGRES_DB}
|
||||
user: ${POSTGRES_USER}
|
||||
password: ${POSTGRES_PASS}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: postgresql-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: postgresql-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
|
||||
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
|
||||
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
|
||||
),
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
|
||||
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
|
||||
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
|
||||
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
|
||||
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
|
||||
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
|
||||
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
|
||||
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
```
|
||||
|
||||
4. Start Toolbox to listen on `127.0.0.1:5000`:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file tools.yaml --address 127.0.0.1 --port 5000
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To stop the Toolbox server when you're finished, press `ctrl+c` to send the terminate signal.
|
||||
{{< /notice >}}
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
2. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
3. Add the following configuration and save:
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Restart Claude code to apply the new configuration.
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Install [`npx`](https://docs.npmjs.com/cli/v8/commands/npx).
|
||||
2. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
3. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
4. Add the following configuration and save:
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote",
|
||||
"http://127.0.0.1:5000/mcp/sse"
|
||||
]
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
5. Restart Claude desktop.
|
||||
6. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
2. Tap Configure MCP Servers to open the configuration file.
|
||||
3. Add the following configuration and save:
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. You should see a green active status after the server is successfully connected.
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
2. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
1. [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
2. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
3. Add the following configuration:
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"type": "sse",
|
||||
"url": "http://127.0.0.1:5000/mcp/sse"
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
2. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
3. Add the following configuration:
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"serverUrl": "http://127.0.0.1:5000/mcp/sse"
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Postgres using MCP. Try asking your AI
|
||||
assistant to list tables, create a table, or define and execute other SQL
|
||||
statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
|
||||
13
docs/en/how-to/connect-ide/spanner_mcp.md
Normal file
13
docs/en/how-to/connect-ide/spanner_mcp.md
Normal file
@@ -0,0 +1,13 @@
|
||||
---
|
||||
title: "Spanner using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Spanner using Toolbox.
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/spanner/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/spanner/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -7,36 +7,49 @@ description: >
|
||||
---
|
||||
|
||||
## Toolbox SDKs vs Model Context Protocol (MCP)
|
||||
Toolbox now supports connections via both the native Toolbox SDKs and via [Model Context Protocol (MCP)](https://modelcontextprotocol.io/). However, Toolbox has several features which are not supported in the MCP specification (such as Authenticated Parameters and Authorized invocation).
|
||||
|
||||
We recommend using the native SDKs over MCP clients to leverage these features. The native SDKs can be combined with MCP clients in many cases.
|
||||
Toolbox now supports connections via both the native Toolbox SDKs and via [Model
|
||||
Context Protocol (MCP)](https://modelcontextprotocol.io/). However, Toolbox has
|
||||
several features which are not supported in the MCP specification (such as
|
||||
Authenticated Parameters and Authorized invocation).
|
||||
|
||||
We recommend using the native SDKs over MCP clients to leverage these features.
|
||||
The native SDKs can be combined with MCP clients in many cases.
|
||||
|
||||
### Protocol Versions
|
||||
|
||||
Toolbox currently supports the following versions of MCP specification:
|
||||
|
||||
* [2024-11-05](https://spec.modelcontextprotocol.io/specification/2024-11-05/)
|
||||
|
||||
### Features Not Supported by MCP
|
||||
|
||||
Toolbox has several features that are not yet supported in the MCP specification:
|
||||
* **AuthZ/AuthN:** There are no auth implementation in the `2024-11-05` specification. This includes:
|
||||
|
||||
* **AuthZ/AuthN:** There are no auth implementation in the `2024-11-05`
|
||||
specification. This includes:
|
||||
* [Authenticated Parameters](../resources/tools/_index.md#authenticated-parameters)
|
||||
* [Authorized Invocations](../resources/tools/_index.md#authorized-invocations)
|
||||
* **Notifications:** Currently, editing Toolbox Tools requires a server restart. Clients should reload tools on disconnect to get the latest version.
|
||||
|
||||
* **Notifications:** Currently, editing Toolbox Tools requires a server restart.
|
||||
Clients should reload tools on disconnect to get the latest version.
|
||||
|
||||
## Connecting to Toolbox with an MCP client
|
||||
|
||||
### Before you begin
|
||||
|
||||
{{< notice note >}}
|
||||
{{< notice note >}}
|
||||
MCP is only compatible with Toolbox version 0.3.0 and above.
|
||||
{{< /notice >}}
|
||||
|
||||
1. [Install](../getting-started/introduction/_index.md#installing-the-server) Toolbox version 0.3.0+.
|
||||
1. [Install](../getting-started/introduction/_index.md#installing-the-server)
|
||||
Toolbox version 0.3.0+.
|
||||
|
||||
1. Make sure you've set up and initialized your database.
|
||||
|
||||
1. [Set up](../getting-started/configure.md) your `tools.yaml` file.
|
||||
|
||||
### Connecting via Standard Input/Output (stdio)
|
||||
|
||||
Toolbox supports the
|
||||
[stdio](https://modelcontextprotocol.io/docs/concepts/transports#standard-input%2Foutput-stdio)
|
||||
transport protocol. Users that wish to use stdio will have to include the
|
||||
@@ -47,14 +60,20 @@ transport protocol. Users that wish to use stdio will have to include the
|
||||
```
|
||||
|
||||
When running with stdio, Toolbox will listen via stdio instead of acting as a
|
||||
remote HTTP server. Logs will be set to the `warn` level by default. `debug` and `info` logs are not
|
||||
supported with stdio.
|
||||
remote HTTP server. Logs will be set to the `warn` level by default. `debug` and
|
||||
`info` logs are not supported with stdio.
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
### Connecting via HTTP
|
||||
|
||||
Toolbox supports the HTTP transport protocol with and without SSE.
|
||||
|
||||
{{< tabpane text=true >}} {{% tab header="HTTP with SSE" lang="en" %}}
|
||||
Add the following configuration to your MCP client configuration:
|
||||
|
||||
```bash
|
||||
{
|
||||
"mcpServers": {
|
||||
@@ -66,11 +85,13 @@ Add the following configuration to your MCP client configuration:
|
||||
}
|
||||
```
|
||||
|
||||
If you would like to connect to a specific toolset, replace `url` with `"http://127.0.0.1:5000/mcp/{toolset_name}/sse"`.
|
||||
If you would like to connect to a specific toolset, replace `url` with
|
||||
`"http://127.0.0.1:5000/mcp/{toolset_name}/sse"`.
|
||||
{{% /tab %}} {{% tab header="HTTP POST" lang="en" %}}
|
||||
Connect to Toolbox HTTP POST via `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
If you would like to connect to a specific toolset, connect via `http://127.0.0.1:5000/mcp/{toolset_name}`.
|
||||
If you would like to connect to a specific toolset, connect via
|
||||
`http://127.0.0.1:5000/mcp/{toolset_name}`.
|
||||
{{% /tab %}} {{< /tabpane >}}
|
||||
|
||||
### Using the MCP Inspector with Toolbox
|
||||
@@ -80,6 +101,7 @@ testing and debugging Toolbox server.
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="STDIO" lang="en" %}}
|
||||
|
||||
1. Run Inspector with Toolbox as a subprocess:
|
||||
|
||||
```bash
|
||||
@@ -88,7 +110,8 @@ testing and debugging Toolbox server.
|
||||
|
||||
1. For `Transport Type` dropdown menu, select `STDIO`.
|
||||
|
||||
1. In `Command`, make sure that it is set to :`./toolbox` (or the correct path to where the Toolbox binary is installed).
|
||||
1. In `Command`, make sure that it is set to :`./toolbox` (or the correct path
|
||||
to where the Toolbox binary is installed).
|
||||
|
||||
1. In `Arguments`, make sure that it's filled with `--stdio`.
|
||||
|
||||
@@ -117,8 +140,8 @@ testing and debugging Toolbox server.
|
||||
|
||||
| Client | SSE Works | MCP Config Docs |
|
||||
|--------|--------|--------|
|
||||
| Claude Desktop | ✅ | https://modelcontextprotocol.io/quickstart/user#1-download-claude-for-desktop |
|
||||
| MCP Inspector | ✅ | https://github.com/modelcontextprotocol/inspector |
|
||||
| Cursor | ✅ | https://docs.cursor.com/context/model-context-protocol |
|
||||
| Windsurf | ✅ | https://docs.windsurf.com/windsurf/mcp |
|
||||
| VS Code (Insiders) | ✅ | https://code.visualstudio.com/docs/copilot/chat/mcp-servers |
|
||||
| Claude Desktop | ✅ | <https://modelcontextprotocol.io/quickstart/user#1-download-claude-for-desktop> |
|
||||
| MCP Inspector | ✅ | <https://github.com/modelcontextprotocol/inspector> |
|
||||
| Cursor | ✅ | <https://docs.cursor.com/context/model-context-protocol> |
|
||||
| Windsurf | ✅ | <https://docs.windsurf.com/windsurf/mcp> |
|
||||
| VS Code (Insiders) | ✅ | <https://code.visualstudio.com/docs/copilot/chat/mcp-servers> |
|
||||
|
||||
@@ -8,7 +8,6 @@ description: >
|
||||
|
||||
<!-- Contributor: Sujith R Pillai <sujithrpillai@gmail.com> -->
|
||||
|
||||
|
||||
## Before you begin
|
||||
|
||||
1. [Install Docker Compose.](https://docs.docker.com/compose/install/)
|
||||
@@ -74,7 +73,6 @@ networks:
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
|
||||
{{< notice tip >}}
|
||||
|
||||
You can use this setup quickly set up Toolbox + Postgres to follow along in our
|
||||
@@ -82,8 +80,6 @@ You can use this setup quickly set up Toolbox + Postgres to follow along in our
|
||||
|
||||
{{< /notice >}}
|
||||
|
||||
|
||||
|
||||
## Connecting with Toolbox Client SDK
|
||||
|
||||
Next, we will use Toolbox with the Client SDKs:
|
||||
@@ -101,14 +97,14 @@ Next, we will use Toolbox with the Client SDKs:
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
# Replace with the cloud run service URL generated above
|
||||
|
||||
async with ToolboxClient("http://$YOUR_URL") as toolbox:
|
||||
{{< /tab >}}
|
||||
{{< tab header="Llamaindex" lang="Python" >}}
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
# Replace with the cloud run service URL generated above
|
||||
|
||||
async with ToolboxClient("http://$YOUR_URL") as toolbox:
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,6 @@ description: >
|
||||
|
||||
## Before you begin
|
||||
|
||||
|
||||
1. Set the PROJECT_ID environment variable:
|
||||
|
||||
```bash
|
||||
@@ -41,7 +40,6 @@ description: >
|
||||
kubectl version --client
|
||||
```
|
||||
|
||||
|
||||
1. If needed, install `kubectl` component using the Google Cloud CLI:
|
||||
|
||||
```bash
|
||||
@@ -62,7 +60,7 @@ description: >
|
||||
gcloud iam service-accounts create $SA_NAME
|
||||
```
|
||||
|
||||
1. Grant any IAM roles necessary to the IAM service account. Each source have a
|
||||
1. Grant any IAM roles necessary to the IAM service account. Each source have a
|
||||
list of necessary IAM permissions listed on it's page. The example below is
|
||||
for cloud sql postgres source:
|
||||
|
||||
@@ -254,6 +252,7 @@ description: >
|
||||
```
|
||||
|
||||
## Clean up resources
|
||||
|
||||
1. Delete secret.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -54,7 +54,6 @@ AlloyDB or Cloud SQL over private IP), make sure your Cloud Run service and the
|
||||
database are in the same VPC network.
|
||||
{{< /notice >}}
|
||||
|
||||
|
||||
## Create a service account
|
||||
|
||||
1. Create a backend service account if you don't already have one:
|
||||
@@ -63,7 +62,7 @@ database are in the same VPC network.
|
||||
gcloud iam service-accounts create toolbox-identity
|
||||
```
|
||||
|
||||
1. Grant permissions to use secret manager:
|
||||
1. Grant permissions to use secret manager:
|
||||
|
||||
```bash
|
||||
gcloud projects add-iam-policy-binding $PROJECT_ID \
|
||||
@@ -71,7 +70,8 @@ database are in the same VPC network.
|
||||
--role roles/secretmanager.secretAccessor
|
||||
```
|
||||
|
||||
1. Grant additional permissions to the service account that are specific to the source, e.g.:
|
||||
1. Grant additional permissions to the service account that are specific to the
|
||||
source, e.g.:
|
||||
- [AlloyDB for PostgreSQL](../resources/sources/alloydb-pg.md#iam-permissions)
|
||||
- [Cloud SQL for PostgreSQL](../resources/sources/cloud-sql-pg.md#iam-permissions)
|
||||
|
||||
@@ -79,7 +79,7 @@ database are in the same VPC network.
|
||||
|
||||
Create a `tools.yaml` file that contains your configuration for Toolbox. For
|
||||
details, see the
|
||||
[configuration](https://github.com/googleapis/genai-toolbox/blob/main/README.md#configuration)
|
||||
[configuration](https://googleapis.github.io/genai-toolbox/resources/sources/)
|
||||
section.
|
||||
|
||||
## Deploy to Cloud Run
|
||||
@@ -97,7 +97,8 @@ section.
|
||||
gcloud secrets versions add tools --data-file=tools.yaml
|
||||
```
|
||||
|
||||
1. Set an environment variable to the container image that you want to use for cloud run:
|
||||
1. Set an environment variable to the container image that you want to use for
|
||||
cloud run:
|
||||
|
||||
```bash
|
||||
export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
|
||||
@@ -134,12 +135,14 @@ section.
|
||||
|
||||
You can connect to Toolbox Cloud Run instances directly through the SDK
|
||||
|
||||
1. [Set up `Cloud Run Invoker` role access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals) to your Cloud Run service.
|
||||
1. [Set up `Cloud Run Invoker` role
|
||||
access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals)
|
||||
to your Cloud Run service.
|
||||
|
||||
1. Set up [Application Default
|
||||
Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
|
||||
for the principle you set up the `Cloud Run Invoker` role access to.
|
||||
|
||||
|
||||
{{< notice tip >}}
|
||||
If you're working in some other environment than local, set up [environment
|
||||
specific Default
|
||||
|
||||
@@ -20,6 +20,7 @@ the need to run, operate, and maintain multiple agents/collectors.
|
||||
To configure the collector, you will have to provide a configuration file. The
|
||||
configuration file consists of four classes of pipeline component that access
|
||||
telemetry data.
|
||||
|
||||
- `Receivers`
|
||||
- `Processors`
|
||||
- `Exporters`
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
title: "Resources"
|
||||
type: docs
|
||||
weight: 4
|
||||
description: List of reference documentation for resources in Toolbox.
|
||||
description: >
|
||||
List of reference documentation for resources in Toolbox.
|
||||
---
|
||||
|
||||
@@ -62,52 +62,106 @@ token you will provide a function (that returns an id). This function is called
|
||||
when the tool is invoked. This allows you to cache and refresh the ID token as
|
||||
needed.
|
||||
|
||||
The primary method for providing these getters is via the `auth_token_getters`
|
||||
parameter when loading tools, or the `add_auth_token_getter`() /
|
||||
`add_auth_token_getters()` methods on a loaded tool object.
|
||||
|
||||
### Specifying tokens during load
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="Python" >}}
|
||||
{{< tab header="Core" lang="Python" >}}
|
||||
import asyncio
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
async def get_auth_token():
|
||||
# ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
|
||||
# This example just returns a placeholder. Replace with your actual token retrieval.
|
||||
return "YOUR_ID_TOKEN" # Placeholder
|
||||
|
||||
# for a single tool use
|
||||
async def main():
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as toolbox:
|
||||
auth_tool = await toolbox.load_tool(
|
||||
"get_sensitive_data",
|
||||
auth_token_getters={"my_auth_app_1": get_auth_token}
|
||||
)
|
||||
result = await auth_tool(param="value")
|
||||
print(result)
|
||||
|
||||
authorized_tool = toolbox.load_tool("my-tool-name", auth_tokens={"my_auth": get_auth_token})
|
||||
if **name** == "**main**":
|
||||
asyncio.run(main())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="Python" >}}
|
||||
import asyncio
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
# for a toolset use
|
||||
async def get_auth_token():
|
||||
# ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
|
||||
# This example just returns a placeholder. Replace with your actual token retrieval.
|
||||
return "YOUR_ID_TOKEN" # Placeholder
|
||||
|
||||
authorized_tools = toolbox.load_toolset("my-toolset-name", auth_tokens={"my_auth": get_auth_token})
|
||||
async def main():
|
||||
toolbox = ToolboxClient("<http://127.0.0.1:5000>")
|
||||
|
||||
auth_tool = await toolbox.aload_tool(
|
||||
"get_sensitive_data",
|
||||
auth_token_getters={"my_auth_app_1": get_auth_token}
|
||||
)
|
||||
result = await auth_tool.ainvoke({"param": "value"})
|
||||
print(result)
|
||||
|
||||
if **name** == "**main**":
|
||||
asyncio.run(main())
|
||||
{{< /tab >}}
|
||||
{{< tab header="Llamaindex" lang="Python" >}}
|
||||
import asyncio
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
async def get_auth_token():
|
||||
# ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
|
||||
# This example just returns a placeholder. Replace with your actual token retrieval.
|
||||
return "YOUR_ID_TOKEN" # Placeholder
|
||||
|
||||
# for a single tool use
|
||||
async def main():
|
||||
toolbox = ToolboxClient("<http://127.0.0.1:5000>")
|
||||
|
||||
authorized_tool = toolbox.load_tool("my-tool-name", auth_tokens={"my_auth": get_auth_token})
|
||||
auth_tool = await toolbox.aload_tool(
|
||||
"get_sensitive_data",
|
||||
auth_token_getters={"my_auth_app_1": get_auth_token}
|
||||
)
|
||||
# result = await auth_tool.acall(param="value")
|
||||
# print(result.content)
|
||||
|
||||
# for a toolset use
|
||||
|
||||
authorized_tools = toolbox.load_toolset("my-toolset-name", auth_tokens={"my_auth": get_auth_token})
|
||||
{{< /tab >}}
|
||||
if **name** == "**main**":
|
||||
asyncio.run(main()){{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
### Specifying tokens for existing tools
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="Core" lang="Python" >}}
|
||||
tools = await toolbox.load_toolset()
|
||||
|
||||
# for a single token
|
||||
|
||||
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
|
||||
|
||||
# OR, if multiple tokens are needed
|
||||
|
||||
authorized_tool = tools[0].add_auth_token_getters({
|
||||
"my_auth1": get_auth1_token,
|
||||
"my_auth2": get_auth2_token,
|
||||
})
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="Python" >}}
|
||||
tools = toolbox.load_toolset()
|
||||
|
||||
# for a single token
|
||||
|
||||
auth_tools = [tool.add_auth_token("my_auth", get_auth_token) for tool in tools]
|
||||
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
|
||||
|
||||
# OR, if multiple tokens are needed
|
||||
|
||||
authorized_tool = tools[0].add_auth_tokens({
|
||||
authorized_tool = tools[0].add_auth_token_getters({
|
||||
"my_auth1": get_auth1_token,
|
||||
"my_auth2": get_auth2_token,
|
||||
})
|
||||
@@ -117,11 +171,11 @@ tools = toolbox.load_toolset()
|
||||
|
||||
# for a single token
|
||||
|
||||
auth_tools = [tool.add_auth_token("my_auth", get_auth_token) for tool in tools]
|
||||
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
|
||||
|
||||
# OR, if multiple tokens are needed
|
||||
|
||||
authorized_tool = tools[0].add_auth_tokens({
|
||||
authorized_tool = tools[0].add_auth_token_getters({
|
||||
"my_auth1": get_auth1_token,
|
||||
"my_auth2": get_auth2_token,
|
||||
})
|
||||
|
||||
@@ -81,8 +81,8 @@ To connect using IAM authentication:
|
||||
1. Prepare your database instance and user following this [guide][iam-guide].
|
||||
2. You could choose one of the two ways to log in:
|
||||
- Specify your IAM email as the `user`.
|
||||
- Leave your `user` field blank. Toolbox
|
||||
will fetch the [ADC][adc] automatically and log in using the email associated with it.
|
||||
- Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
|
||||
automatically and log in using the email associated with it.
|
||||
3. Leave the `password` field blank.
|
||||
|
||||
[iam-guide]: https://cloud.google.com/alloydb/docs/database-users/manage-iam-auth
|
||||
|
||||
@@ -63,8 +63,8 @@ mTLS.
|
||||
|
||||
### Database User
|
||||
|
||||
Currently, this source only uses standard authentication. You will need to [create a
|
||||
SQL Server user][cloud-sql-users] to login to the database with.
|
||||
Currently, this source only uses standard authentication. You will need to
|
||||
[create a SQL Server user][cloud-sql-users] to login to the database with.
|
||||
|
||||
[cloud-sql-users]: https://cloud.google.com/sql/docs/sqlserver/create-manage-users
|
||||
|
||||
@@ -96,7 +96,7 @@ instead of hardcoding your secrets into the configuration file.
|
||||
| kind | string | true | Must be "cloud-sql-mssql". |
|
||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
||||
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
|
||||
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
|
||||
| database | string | true | Name of the Cloud SQL database to connect to (e.g. "my_db"). |
|
||||
| ipAddress | string | true | IP address of the Cloud SQL instance to connect to. |
|
||||
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-pg-user"). |
|
||||
|
||||
@@ -85,8 +85,8 @@ To connect using IAM authentication:
|
||||
1. Prepare your database instance and user following this [guide][iam-guide].
|
||||
2. You could choose one of the two ways to log in:
|
||||
- Specify your IAM email as the `user`.
|
||||
- Leave your `user` field blank. Toolbox
|
||||
will fetch the [ADC][adc] automatically and log in using the email associated with it.
|
||||
- Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
|
||||
automatically and log in using the email associated with it.
|
||||
|
||||
3. Leave the `password` field blank.
|
||||
|
||||
@@ -115,13 +115,13 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cloud-sql-postgres". |
|
||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
||||
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
|
||||
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |
|
||||
| user | string | false | Name of the Postgres user to connect as (e.g. "my-pg-user"). Defaults to IAM auth using [ADC][adc] email if unspecified. |
|
||||
| password | string | false | Password of the Postgres user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
|
||||
| ipType | string | false | IP Type of the Cloud SQL instance; must be one of `public` or `private`. Default: `public`. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cloud-sql-postgres". |
|
||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
||||
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
|
||||
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |
|
||||
| user | string | false | Name of the Postgres user to connect as (e.g. "my-pg-user"). Defaults to IAM auth using [ADC][adc] email if unspecified. |
|
||||
| password | string | false | Password of the Postgres user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
|
||||
| ipType | string | false | IP Type of the Cloud SQL instance; must be one of `public` or `private`. Default: `public`. |
|
||||
|
||||
@@ -8,7 +8,8 @@ description: >
|
||||
|
||||
## About
|
||||
|
||||
A `couchbase` source establishes a connection to a Couchbase database cluster, allowing tools to execute SQL queries against it.
|
||||
A `couchbase` source establishes a connection to a Couchbase database cluster,
|
||||
allowing tools to execute SQL queries against it.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -25,19 +26,19 @@ sources:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|---------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "couchbase". |
|
||||
| connectionString | string | true | Connection string for the Couchbase cluster. |
|
||||
| bucket | string | true | Name of the bucket to connect to. |
|
||||
| scope | string | true | Name of the scope within the bucket. |
|
||||
| username | string | false | Username for authentication. |
|
||||
| password | string | false | Password for authentication. |
|
||||
| clientCert | string | false | Path to client certificate file for TLS authentication. |
|
||||
| clientCertPassword| string | false | Password for the client certificate. |
|
||||
| clientKey | string | false | Path to client key file for TLS authentication. |
|
||||
| clientKeyPassword | string | false | Password for the client key. |
|
||||
| caCert | string | false | Path to CA certificate file. |
|
||||
| noSslVerify | boolean | false | If true, skip server certificate verification. **Warning:** This option should only be used in development or testing environments. Disabling SSL verification poses significant security risks in production as it makes your connection vulnerable to man-in-the-middle attacks. |
|
||||
| profile | string | false | Name of the connection profile to apply. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------------|:--------:|:------------:|---------------------------------------------------------|
|
||||
| kind | string | true | Must be "couchbase". |
|
||||
| connectionString | string | true | Connection string for the Couchbase cluster. |
|
||||
| bucket | string | true | Name of the bucket to connect to. |
|
||||
| scope | string | true | Name of the scope within the bucket. |
|
||||
| username | string | false | Username for authentication. |
|
||||
| password | string | false | Password for authentication. |
|
||||
| clientCert | string | false | Path to client certificate file for TLS authentication. |
|
||||
| clientCertPassword | string | false | Password for the client certificate. |
|
||||
| clientKey | string | false | Path to client key file for TLS authentication. |
|
||||
| clientKeyPassword | string | false | Password for the client key. |
|
||||
| caCert | string | false | Path to CA certificate file. |
|
||||
| noSslVerify | boolean | false | If true, skip server certificate verification. **Warning:** This option should only be used in development or testing environments. Disabling SSL verification poses significant security risks in production as it makes your connection vulnerable to man-in-the-middle attacks. |
|
||||
| profile | string | false | Name of the connection profile to apply. |
|
||||
| queryScanConsistency | integer | false | Query scan consistency. Controls the consistency guarantee for index scanning. Values: 1 for "not_bounded" (fastest option, but results may not include the most recent operations), 2 for "request_plus" (highest consistency level, includes all operations up until the query started, but incurs a performance penalty). If not specified, defaults to the Couchbase Go SDK default. |
|
||||
|
||||
@@ -9,7 +9,10 @@ description: >
|
||||
|
||||
## About
|
||||
|
||||
[Dgraph][dgraph-docs] is an open-source graph database. It is designed for real-time workloads, horizontal scalability, and data flexibility. Implemented as a distributed system, Dgraph processes queries in parallel to deliver the fastest result.
|
||||
[Dgraph][dgraph-docs] is an open-source graph database. It is designed for
|
||||
real-time workloads, horizontal scalability, and data flexibility. Implemented
|
||||
as a distributed system, Dgraph processes queries in parallel to deliver the
|
||||
fastest result.
|
||||
|
||||
This source can connect to either a self-managed Dgraph cluster or one hosted on
|
||||
Dgraph Cloud. If you're new to Dgraph, the fastest way to get started is to
|
||||
@@ -52,7 +55,7 @@ instead of hardcoding your secrets into the configuration file.
|
||||
| **Field** | **Type** | **Required** | **Description** |
|
||||
|-------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "dgraph". |
|
||||
| dgraphUrl | string | true | Connection URI (e.g. "<https://xxx.cloud.dgraph.io>", "<https://localhost:8080>"). |
|
||||
| dgraphUrl | string | true | Connection URI (e.g. "<https://xxx.cloud.dgraph.io>", "<https://localhost:8080>"). |
|
||||
| user | string | false | Name of the Dgraph user to connect as (e.g., "groot"). |
|
||||
| password | string | false | Password of the Dgraph user (e.g., "password"). |
|
||||
| apiKey | string | false | API key to connect to a Dgraph Cloud instance. |
|
||||
|
||||
@@ -27,6 +27,7 @@ sources:
|
||||
queryParams:
|
||||
param1: value1
|
||||
param2: value2
|
||||
# disableSslVerification: false
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
@@ -36,12 +37,13 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:-----------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "http". |
|
||||
| baseUrl | string | true | The base URL for the HTTP requests (e.g., `https://api.example.com`). |
|
||||
| timeout | string | false | The timeout for HTTP requests (e.g., "5s", "1m", refer to [ParseDuration][parse-duration-doc] for more examples). Defaults to 30s. |
|
||||
| headers | map[string]string | false | Default headers to include in the HTTP requests. |
|
||||
| queryParams | map[string]string | false | Default query parameters to include in the HTTP requests. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|------------------------|:-----------------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "http". |
|
||||
| baseUrl | string | true | The base URL for the HTTP requests (e.g., `https://api.example.com`). |
|
||||
| timeout | string | false | The timeout for HTTP requests (e.g., "5s", "1m", refer to [ParseDuration][parse-duration-doc] for more examples). Defaults to 30s. |
|
||||
| headers | map[string]string | false | Default headers to include in the HTTP requests. |
|
||||
| queryParams | map[string]string | false | Default query parameters to include in the HTTP requests. |
|
||||
| disableSslVerification | bool | false | Disable SSL certificate verification. This should only be used for local development. Defaults to `false`. |
|
||||
|
||||
[parse-duration-doc]: https://pkg.go.dev/time#ParseDuration
|
||||
|
||||
96
docs/en/resources/sources/redis.md
Normal file
96
docs/en/resources/sources/redis.md
Normal file
@@ -0,0 +1,96 @@
|
||||
---
|
||||
title: "Redis"
|
||||
linkTitle: "Redis"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Redis is an open-source, in-memory data structure store.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
Redis is an open-source, in-memory data structure store, used as a database,
|
||||
cache, and message broker. It supports data structures such as strings, hashes,
|
||||
lists, sets, sorted sets with range queries, bitmaps, hyperloglogs, and
|
||||
geospatial indexes with radius queries.
|
||||
|
||||
If you are new to Redis, you can find installation and getting started guides on
|
||||
the [official Redis website](https://redis.io/docs/getting-started/).
|
||||
|
||||
## Requirements
|
||||
|
||||
### Redis
|
||||
|
||||
[AUTH string][auth] is a password for connection to Redis. If you have the
|
||||
`requirepass` directive set in your Redis configuration, incoming client
|
||||
connections must authenticate in order to connect.
|
||||
|
||||
Specify your AUTH string in the password field:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-redis-instance:
|
||||
kind: redis
|
||||
address:
|
||||
- 127.0.0.1
|
||||
username: ${MY_USER_NAME}
|
||||
password: ${MY_AUTH_STRING} # Omit this field if you don't have a password.
|
||||
# database: 0
|
||||
# clusterEnabled: false
|
||||
# useGCPIAM: false
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Memorystore For Redis
|
||||
|
||||
Memorystore standalone instances support authentication using an [AUTH][auth]
|
||||
string.
|
||||
|
||||
Here is an example tools.yaml config with [AUTH][auth] enabled:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-redis-cluster-instance:
|
||||
kind: memorystore-redis
|
||||
address:
|
||||
- 127.0.0.1
|
||||
password: ${MY_AUTH_STRING}
|
||||
# useGCPIAM: false
|
||||
# clusterEnabled: false
|
||||
```
|
||||
|
||||
Memorystore Redis Cluster supports IAM authentication instead. Grant your
|
||||
account the required [IAM role][iam] and make sure to set `useGCPIAM` to `true`.
|
||||
|
||||
Here is an example tools.yaml config for Memorystore Redis Cluster instances
|
||||
using IAM authentication:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-redis-cluster-instance:
|
||||
kind: memorystore-redis
|
||||
address: 127.0.0.1
|
||||
useGCPIAM: true
|
||||
clusterEnabled: true
|
||||
```
|
||||
|
||||
[iam]: https://cloud.google.com/memorystore/docs/cluster/about-iam-auth
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind           | string   | true         | Must be "redis".                                                                                                                  |
|
||||
| address | string | true | Primary endpoint for the Memorystore Redis instance to connect to. |
|
||||
| username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Redis, leave this field blank |
|
||||
| password | string | false | If you have [Redis AUTH][auth] enabled, specify the AUTH string here |
|
||||
| database | int | false | The Redis database to connect to. Not applicable for cluster enabled instances. The default database is `0`. |
|
||||
| clusterEnabled | bool | false | Set it to `true` if using a Redis Cluster instance. Defaults to `false`. |
|
||||
| useGCPIAM      | bool     | false        | Set it to `true` if you are using GCP's IAM authentication. Defaults to `false`.                                                  |
|
||||
|
||||
[auth]: https://cloud.google.com/memorystore/docs/redis/about-redis-auth
|
||||
@@ -15,6 +15,7 @@ database management system. The lite in SQLite means lightweight in terms of
|
||||
setup, database administration, and required resources.
|
||||
|
||||
SQLite has the following notable characteristics:
|
||||
|
||||
- Self-contained with no external dependencies
|
||||
- Serverless - the SQLite library accesses its storage files directly
|
||||
- Single database file that can be easily copied or moved
|
||||
@@ -26,6 +27,7 @@ SQLite has the following notable characteristics:
|
||||
### Database File
|
||||
|
||||
You need a SQLite database file. This can be:
|
||||
|
||||
- An existing database file
|
||||
- A path where a new database file should be created
|
||||
- `:memory:` for an in-memory database
|
||||
@@ -40,6 +42,7 @@ sources:
|
||||
```
|
||||
|
||||
For an in-memory database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-sqlite-memory-db:
|
||||
@@ -51,13 +54,14 @@ sources:
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|-------|------|----------|-------------|
|
||||
| kind | string | Yes | Must be "sqlite" |
|
||||
| database | string | Yes | Path to SQLite database file, or ":memory:" for an in-memory database |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------|
|
||||
| kind      | string   | true         | Must be "sqlite".                                                                                                    |
|
||||
| database | string | true | Path to SQLite database file, or ":memory:" for an in-memory database. |
|
||||
|
||||
### Connection Properties
|
||||
|
||||
SQLite connections are configured with these defaults for optimal performance:
|
||||
|
||||
- `MaxOpenConns`: 1 (SQLite only supports one writer at a time)
|
||||
- `MaxIdleConns`: 1
|
||||
- `MaxIdleConns`: 1
|
||||
|
||||
69
docs/en/resources/sources/valkey.md
Normal file
69
docs/en/resources/sources/valkey.md
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
title: "Valkey"
|
||||
linkTitle: "Valkey"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Valkey is an open-source, in-memory data structure store, forked from Redis.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
Valkey is an open-source, in-memory data structure store that originated as a
|
||||
fork of Redis. It's designed to be used as a database, cache, and message
|
||||
broker, supporting a wide range of data structures like strings, hashes, lists,
|
||||
sets, sorted sets with range queries, bitmaps, hyperloglogs, and geospatial
|
||||
indexes with radius queries.
|
||||
|
||||
If you're new to Valkey, you can find installation and getting started guides on
|
||||
the [official Valkey website](https://valkey.io/docs/getting-started/).
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-valkey-instance:
|
||||
kind: valkey
|
||||
address:
|
||||
- 127.0.0.1
|
||||
username: ${YOUR_USERNAME}
|
||||
password: ${YOUR_PASSWORD}
|
||||
# database: 0
|
||||
# useGCPIAM: false
|
||||
# disableCache: false
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### IAM Authentication
|
||||
|
||||
If you are using GCP's Memorystore for Valkey, you can connect using IAM
|
||||
authentication. Grant your account the required [IAM role][iam] and set
|
||||
`useGCPIAM` to `true`:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-valkey-instance:
|
||||
kind: valkey
|
||||
address:
|
||||
- 127.0.0.1
|
||||
useGCPIAM: true
|
||||
```
|
||||
|
||||
[iam]: https://cloud.google.com/memorystore/docs/valkey/about-iam-auth
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "valkey". |
|
||||
| address | []string | true | Endpoints for the Valkey instance to connect to. |
|
||||
| username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Valkey, leave this field blank |
|
||||
| password | string | false | Password for the Valkey instance |
|
||||
| database | int | false | The Valkey database to connect to. Not applicable for cluster enabled instances. The default database is `0`. |
|
||||
| useGCPIAM | bool | false | Set it to `true` if you are using GCP's IAM authentication. Defaults to `false`. |
|
||||
| disableCache | bool | false | Set it to `true` if you want to enable client-side caching. Defaults to `false`. |
|
||||
@@ -11,7 +11,6 @@ A tool represents an action your agent can take, such as running a SQL
|
||||
statement. You can define Tools as a map in the `tools` section of your
|
||||
`tools.yaml` file. Typically, a tool will require a source to act on:
|
||||
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
@@ -50,7 +49,6 @@ tools:
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
|
||||
## Specifying Parameters
|
||||
|
||||
Parameters for each Tool will define what inputs the agent will need to provide
|
||||
@@ -79,44 +77,53 @@ the parameter.
|
||||
description: Airline unique 2 letter identifier
|
||||
```
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|----------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the parameter. |
|
||||
| type | string | true | Must be one of "string", "integer", "float", "boolean" "array" |
|
||||
| description | string | true | Natural language description of the parameter to describe it to the agent. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:---------------:|:------------:|-----------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the parameter. |
|
||||
| type | string | true | Must be one of "string", "integer", "float", "boolean" "array" |
|
||||
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
|
||||
| description | string | true | Natural language description of the parameter to describe it to the agent. |
|
||||
|
||||
### Array Parameters
|
||||
|
||||
The `array` type is a list of items passed in as a single parameter.
|
||||
To use the `array` type, you must also specify what kind of items are
|
||||
To use the `array` type, you must also specify what kind of items are
|
||||
in the list using the items field:
|
||||
|
||||
```yaml
|
||||
parameters:
|
||||
- name: preferred_airlines
|
||||
type: array
|
||||
description: A list of airline, ordered by preference.
|
||||
description: A list of airline, ordered by preference.
|
||||
items:
|
||||
name: name
|
||||
name: name
|
||||
type: string
|
||||
description: Name of the airline.
|
||||
description: Name of the airline.
|
||||
statement: |
|
||||
SELECT * FROM airlines WHERE preferred_airlines = ANY($1);
|
||||
```
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:----------------:|:------------:|----------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the parameter. |
|
||||
| type | string | true | Must be "array" |
|
||||
| description | string | true | Natural language description of the parameter to describe it to the agent. |
|
||||
| items | parameter object | true | Specify a Parameter object for the type of the values in the array. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:----------------:|:------------:|-----------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the parameter. |
|
||||
| type | string | true | Must be "array" |
|
||||
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
|
||||
| description | string | true | Natural language description of the parameter to describe it to the agent. |
|
||||
| items | parameter object | true | Specify a Parameter object for the type of the values in the array. |
|
||||
|
||||
{{< notice note >}}
|
||||
Items in array should not have a default value. If provided, it will be ignored.
|
||||
{{< /notice >}}
|
||||
|
||||
### Authenticated Parameters
|
||||
|
||||
Authenticated parameters are automatically populated with user
|
||||
information decoded from [ID tokens](../authsources/#specifying-id-tokens-from-clients) that
|
||||
are passed in request headers. They do not take input values in request bodies
|
||||
like other parameters. To use authenticated parameters, you must configure
|
||||
the tool to map the required [authServices](../authservices) to
|
||||
specific claims within the user's ID token.
|
||||
information decoded from [ID
|
||||
tokens](../authsources/#specifying-id-tokens-from-clients) that are passed in
|
||||
request headers. They do not take input values in request bodies like other
|
||||
parameters. To use authenticated parameters, you must configure the tool to map
|
||||
the required [authServices](../authservices) to specific claims within the
|
||||
user's ID token.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -141,6 +148,60 @@ specific claims within the user's ID token.
|
||||
| name | string | true | Name of the [authServices](../authservices) used to verify the OIDC auth token. |
|
||||
| field | string | true | Claim field decoded from the OIDC token used to auto-populate this parameter. |
|
||||
|
||||
### Template Parameters
|
||||
|
||||
Template parameter types include the `string`, `integer`, `float`, and `boolean` types.
|
||||
In most cases, the description will be provided to the LLM as context on
|
||||
specifying the parameter. Template parameters will be inserted into the SQL
|
||||
statement before executing the prepared statement. They will be inserted without
|
||||
quotes, so to insert a string using template parameters, quotes must be
|
||||
explicitly added within the string.
|
||||
|
||||
Template parameter arrays can also be used similarly to basic parameters, and array
|
||||
items must be strings. Once inserted into the SQL statement, the outer layer of
|
||||
quotes will be removed. Therefore to insert strings into the SQL statement, a
|
||||
set of quotes must be explicitly added within the string.
|
||||
|
||||
{{< notice warning >}}
|
||||
Because template parameters can directly replace identifiers, column names, and
|
||||
table names, they are prone to SQL injections. Basic parameters are preferred
|
||||
for performance and safety reasons.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
select_columns_from_table:
|
||||
kind: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
SELECT {{array .columnNames}} FROM {{.tableName}}
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
"columnNames": ["id", "name"]
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
- name: columnNames
|
||||
type: array
|
||||
description: The columns to select
|
||||
items:
|
||||
name: column
|
||||
type: string
|
||||
description: Name of a column to select
|
||||
```
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:----------------:|:-------------:|-------------------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the template parameter. |
|
||||
| type | string | true | Must be one of "string", "integer", "float", "boolean", "array" |
|
||||
| description | string | true | Natural language description of the template parameter to describe it to the agent. |
|
||||
| items | parameter object |true (if array)| Specify a Parameter object for the type of the values in the array (string only). |
|
||||
|
||||
## Authorized Invocations
|
||||
|
||||
You can require an authorization check for any Tool invocation request by
|
||||
|
||||
7
docs/en/resources/tools/alloydbainl/_index.md
Normal file
7
docs/en/resources/tools/alloydbainl/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "AlloyDB AI NL"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
AlloyDB AI NL Tool.
|
||||
---
|
||||
@@ -7,6 +7,8 @@ description: >
|
||||
[AlloyDB AI](https://cloud.google.com/alloydb/ai) next-generation Natural
|
||||
Language support to provide the ability to query the database directly using
|
||||
natural language.
|
||||
aliases:
|
||||
- /resources/tools/alloydb-ai-nl
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -16,22 +18,23 @@ Language][alloydb-ai-nl-overview] support to allow an Agent the ability to query
|
||||
the database directly using natural language. Natural language streamlines the
|
||||
development of generative AI applications by transferring the complexity of
|
||||
converting natural language to SQL from the application layer to the database
|
||||
layer.
|
||||
layer.
|
||||
|
||||
This tool is compatible with the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
|
||||
AlloyDB AI Natural Language delivers secure and accurate responses for
|
||||
application end user natural language questions. Natural language streamlines
|
||||
the development of generative AI applications by transferring the complexity
|
||||
of converting natural language to SQL from the application layer to the
|
||||
AlloyDB AI Natural Language delivers secure and accurate responses for
|
||||
application end user natural language questions. Natural language streamlines
|
||||
the development of generative AI applications by transferring the complexity
|
||||
of converting natural language to SQL from the application layer to the
|
||||
database layer.
|
||||
|
||||
## Requirements
|
||||
|
||||
{{< notice tip >}} AlloyDB AI natural language is currently in gated public
|
||||
preview. For more information on availability and limitations, please see
|
||||
[AlloyDB AI natural language
|
||||
overview](https://cloud.google.com/alloydb/docs/natural-language-questions-overview)
|
||||
[AlloyDB AI natural language overview](https://cloud.google.com/alloydb/docs/ai/natural-language-overview)
|
||||
{{< /notice >}}
|
||||
|
||||
To enable AlloyDB AI natural language for your AlloyDB cluster, please follow
|
||||
@@ -39,19 +42,19 @@ the steps listed in the [Generate SQL queries that answer natural language
|
||||
questions][alloydb-ai-gen-nl], including enabling the extension and configuring
|
||||
context for your application.
|
||||
|
||||
[alloydb-ai-nl-overview]: https://cloud.google.com/alloydb/docs/natural-language-questions-overview
|
||||
[alloydb-ai-gen-nl]: https://cloud.google.com/alloydb/docs/alloydb/docs/ai/generate-queries-natural-language
|
||||
|
||||
[alloydb-ai-nl-overview]: https://cloud.google.com/alloydb/docs/ai/natural-language-overview
|
||||
[alloydb-ai-gen-nl]: https://cloud.google.com/alloydb/docs/ai/generate-sql-queries-natural-language
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
### Specifying an `nl_config`
|
||||
|
||||
A `nl_config` is a configuration that associates an application to schema
|
||||
objects, examples and other contexts that can be used. A large application can
|
||||
also use different configurations for different parts of the app, as long as the
|
||||
correct configuration can be specified when a question is sent from that part of
|
||||
the application.
|
||||
|
||||
|
||||
Once you've followed the steps for configuring context, you can use the
|
||||
`context` field when configuring a `alloydb-ai-nl` tool. When this tool is
|
||||
invoked, the SQL will be generated and executed using this context.
|
||||
@@ -59,9 +62,9 @@ invoked, the SQL will be generated and executed using this context.
|
||||
### Specifying Parameters to PSV's
|
||||
|
||||
[Parameterized Secure Views (PSVs)][alloydb-psv] are a feature unique to AlloyDB
|
||||
that allows you allow you to require one or more named parameter values passed
|
||||
that allows you to require one or more named parameter values passed
|
||||
to the view when querying it, somewhat like bind variables with ordinary
|
||||
database queries.
|
||||
database queries.
|
||||
|
||||
You can use the `nlConfigParameters` to list the parameters required for your
|
||||
`nl_config`. You **must** supply all parameters required for all PSVs in the
|
||||
@@ -70,7 +73,7 @@ Parameters](../tools/#array-parameters) or Bound Parameters to provide secure
|
||||
access to queries generated using natural language, as these parameters are not
|
||||
visible to the LLM.
|
||||
|
||||
[alloydb-psv]: https://cloud.google.com/alloydb/docs/ai/use-psvs#parameterized_secure_views
|
||||
[alloydb-psv]: https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview
|
||||
|
||||
## Example
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
---
|
||||
title: "bigquery-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-sql" tool executes a pre-defined SQL statement.
|
||||
---
|
||||
|
||||
## About
|
||||
A `bigquery-sql` tool executes a pre-defined SQL statement. It's compatible with
|
||||
the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
BigQuery uses [GoogleSQL][bigquery-googlesql] for querying data. The integration
|
||||
with Toolbox supports this dialect. The specified SQL statement is executed, and
|
||||
parameters can be inserted into the query. BigQuery supports both named parameters
|
||||
(e.g., `@name`) and positional parameters (`?`), but they cannot be mixed in the
|
||||
same query.
|
||||
|
||||
> **Note:** This tool uses [parameterized queries](https://cloud.google.com/bigquery/docs/parameterized-queries) to prevent SQL injections. Query parameters can be used as substitutes for arbitrary expressions. Parameters cannot be used as substitutes for identifiers, column names, table names, or other parts of the query.
|
||||
|
||||
[bigquery-googlesql]: https://cloud.google.com/bigquery/docs/reference/standard-sql/
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
# Example: Querying a user table in BigQuery
|
||||
search_users_bq:
|
||||
kind: bigquery-sql
|
||||
source: my-bigquery-source
|
||||
statement: |
|
||||
SELECT
|
||||
id,
|
||||
name,
|
||||
email
|
||||
FROM
|
||||
`my-project.my-dataset.users`
|
||||
WHERE
|
||||
id = @id OR email = @email;
|
||||
description: |
|
||||
Use this tool to get information for a specific user.
|
||||
Takes an id number or a name and returns info on the user.
|
||||
|
||||
Example:
|
||||
{{
|
||||
"id": 123,
|
||||
"name": "Alice",
|
||||
}}
|
||||
parameters:
|
||||
- name: id
|
||||
type: integer
|
||||
description: User ID
|
||||
- name: email
|
||||
type: string
|
||||
description: Email address of the user
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-sql". |
|
||||
| source | string | true | Name of the source the GoogleSQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The GoogleSQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
7
docs/en/resources/tools/bigquery/_index.md
Normal file
7
docs/en/resources/tools/bigquery/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "BigQuery"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with BigQuery Sources.
|
||||
---
|
||||
@@ -4,6 +4,8 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-execute-sql" tool executes a SQL statement against BigQuery.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -30,6 +32,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-execute-sql". |
|
||||
| kind | string | true | Must be "bigquery-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -4,6 +4,8 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-get-dataset-info" tool retrieves metadata for a BigQuery dataset.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-get-dataset-info
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -13,8 +15,10 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
bigquery-get-dataset-info takes a dataset parameter to specify the dataset
|
||||
on the given source.
|
||||
`bigquery-get-dataset-info` takes a `dataset` parameter to specify the dataset
|
||||
on the given source. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided,
|
||||
the tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,6 +34,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-get-dataset-info". |
|
||||
| kind | string | true | Must be "bigquery-get-dataset-info". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -4,6 +4,8 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-get-table-info" tool retrieves metadata for a BigQuery table.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-get-table-info
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -13,8 +15,10 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
bigquery-get-table-info takes dataset and table parameters to specify
|
||||
the target table.
|
||||
`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
|
||||
the target table. It also optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,6 +34,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-get-table-info". |
|
||||
| kind | string | true | Must be "bigquery-get-table-info". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -4,6 +4,8 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-list-dataset-ids" tool returns all dataset IDs from the source.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-list-dataset-ids
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -13,8 +15,9 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
bigquery-list-dataset-ids requires no input parameters beyond the configured
|
||||
source.
|
||||
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,6 +33,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-list-dataset-ids". |
|
||||
| kind | string | true | Must be "bigquery-list-dataset-ids". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -4,6 +4,8 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-list-table-ids" tool returns table IDs in a given BigQuery dataset.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-list-table-ids
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -13,8 +15,10 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
bigquery-get-dataset-info takes a dataset parameter to specify the dataset
|
||||
from which to list table IDs.
|
||||
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
|
||||
from which to list table IDs. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,6 +34,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-list-table-ids". |
|
||||
| kind | string | true | Must be "bigquery-list-table-ids". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
105
docs/en/resources/tools/bigquery/bigquery-sql.md
Normal file
105
docs/en/resources/tools/bigquery/bigquery-sql.md
Normal file
@@ -0,0 +1,105 @@
|
||||
---
|
||||
title: "bigquery-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-sql" tool executes a pre-defined SQL statement.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `bigquery-sql` tool executes a pre-defined SQL statement. It's compatible with
|
||||
the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
BigQuery uses [GoogleSQL][bigquery-googlesql] for querying data. The integration
|
||||
with Toolbox supports this dialect. The specified SQL statement is executed, and
|
||||
parameters can be inserted into the query. BigQuery supports both named parameters
|
||||
(e.g., `@name`) and positional parameters (`?`), but they cannot be mixed in the
|
||||
same query.
|
||||
|
||||
[bigquery-googlesql]: https://cloud.google.com/bigquery/docs/reference/standard-sql/
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses [parameterized
|
||||
> queries](https://cloud.google.com/bigquery/docs/parameterized-queries) to
|
||||
> prevent SQL injections. Query parameters can be used as substitutes for
|
||||
> arbitrary expressions. Parameters cannot be used as substitutes for
|
||||
> identifiers, column names, table names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
# Example: Querying a user table in BigQuery
|
||||
search_users_bq:
|
||||
kind: bigquery-sql
|
||||
source: my-bigquery-source
|
||||
statement: |
|
||||
SELECT
|
||||
id,
|
||||
name,
|
||||
email
|
||||
FROM
|
||||
`my-project.my-dataset.users`
|
||||
WHERE
|
||||
id = @id OR email = @email;
|
||||
description: |
|
||||
Use this tool to get information for a specific user.
|
||||
Takes an id number or a name and returns info on the user.
|
||||
|
||||
Example:
|
||||
{{
|
||||
"id": 123,
|
||||
"name": "Alice",
|
||||
}}
|
||||
parameters:
|
||||
- name: id
|
||||
type: integer
|
||||
description: User ID
|
||||
- name: email
|
||||
type: string
|
||||
description: Email address of the user
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: bigquery-sql
|
||||
source: my-bigquery-source
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-sql". |
|
||||
| source | string | true | Name of the source the GoogleSQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The GoogleSQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
@@ -1,87 +0,0 @@
|
||||
---
|
||||
title: "bigtable-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
|
||||
Cloud Bigtable instance.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `bigtable-sql` tool executes a pre-defined SQL statement against a Bigtable
|
||||
instance. It's compatible with any of the following sources:
|
||||
|
||||
- [bigtable](../sources/bigtable.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
Bigtable supports SQL queries. The integration with Toolbox supports `googlesql`
|
||||
dialect, the specified SQL statement is executed as a [data manipulation
|
||||
language (DML)][bigtable-googlesql] statement, and specified parameters will be
|
||||
inserted according to their name: e.g. `@name`.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
[bigtable-googlesql]: https://cloud.google.com/bigtable/docs/googlesql-overview
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_user_by_id_or_name:
|
||||
kind: bigtable-sql
|
||||
source: my-bigtable-instance
|
||||
statement: |
|
||||
SELECT
|
||||
TO_INT64(cf[ 'id' ]) as id,
|
||||
CAST(cf[ 'name' ] AS string) as name,
|
||||
FROM
|
||||
mytable
|
||||
WHERE
|
||||
TO_INT64(cf[ 'id' ]) = @id
|
||||
OR CAST(cf[ 'name' ] AS string) = @name;
|
||||
description: |
|
||||
Use this tool to get information for a specific user.
|
||||
Takes an id number or a name and returns info on the user.
|
||||
|
||||
Example:
|
||||
{{
|
||||
"id": 123,
|
||||
"name": "Alice",
|
||||
}}
|
||||
parameters:
|
||||
- name: id
|
||||
type: integer
|
||||
description: User ID
|
||||
- name: name
|
||||
type: string
|
||||
description: Name of the user
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigtable-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
|
||||
## Tips
|
||||
|
||||
- [Bigtable Studio][bigtable-studio] is a useful tool to explore and manage your
|
||||
Bigtable data. If you're unfamiliar with the query syntax, [Query
|
||||
Builder][bigtable-querybuilder] lets you build a query, run it against a
|
||||
table, and then view the results in the console.
|
||||
- Some Python libraries limit the use of underscore columns such as `_key`. A
|
||||
workaround would be to leverage Bigtable [Logical
|
||||
Views][bigtable-logical-view] to rename the columns.
|
||||
|
||||
[bigtable-studio]: https://cloud.google.com/bigtable/docs/manage-data-using-console
|
||||
[bigtable-logical-view]: https://cloud.google.com/bigtable/docs/create-manage-logical-views
|
||||
[bigtable-querybuilder]: https://cloud.google.com/bigtable/docs/query-builder
|
||||
7
docs/en/resources/tools/bigtable/_index.md
Normal file
7
docs/en/resources/tools/bigtable/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Bigtable"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Bigtable Sources.
|
||||
---
|
||||
117
docs/en/resources/tools/bigtable/bigtable-sql.md
Normal file
117
docs/en/resources/tools/bigtable/bigtable-sql.md
Normal file
@@ -0,0 +1,117 @@
|
||||
---
|
||||
title: "bigtable-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
|
||||
Cloud Bigtable instance.
|
||||
aliases:
|
||||
- /resources/tools/bigtable-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `bigtable-sql` tool executes a pre-defined SQL statement against a Bigtable
|
||||
instance. It's compatible with any of the following sources:
|
||||
|
||||
- [bigtable](../sources/bigtable.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
Bigtable supports SQL queries. The integration with Toolbox supports `googlesql`
|
||||
dialect, the specified SQL statement is executed as a [data manipulation
|
||||
language (DML)][bigtable-googlesql] statement, and specified parameters will be
|
||||
inserted according to their name: e.g. `@name`.
|
||||
|
||||
[bigtable-googlesql]: https://cloud.google.com/bigtable/docs/googlesql-overview
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_user_by_id_or_name:
|
||||
kind: bigtable-sql
|
||||
source: my-bigtable-instance
|
||||
statement: |
|
||||
SELECT
|
||||
TO_INT64(cf[ 'id' ]) as id,
|
||||
CAST(cf[ 'name' ] AS string) as name,
|
||||
FROM
|
||||
mytable
|
||||
WHERE
|
||||
TO_INT64(cf[ 'id' ]) = @id
|
||||
OR CAST(cf[ 'name' ] AS string) = @name;
|
||||
description: |
|
||||
Use this tool to get information for a specific user.
|
||||
Takes an id number or a name and returns info on the user.
|
||||
|
||||
Example:
|
||||
{{
|
||||
"id": 123,
|
||||
"name": "Alice",
|
||||
}}
|
||||
parameters:
|
||||
- name: id
|
||||
type: integer
|
||||
description: User ID
|
||||
- name: name
|
||||
type: string
|
||||
description: Name of the user
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: bigtable-sql
|
||||
source: my-bigtable-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigtable-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
## Tips
|
||||
|
||||
- [Bigtable Studio][bigtable-studio] is a useful tool to explore and manage your
|
||||
Bigtable data. If you're unfamiliar with the query syntax, [Query
|
||||
Builder][bigtable-querybuilder] lets you build a query, run it against a
|
||||
table, and then view the results in the console.
|
||||
- Some Python libraries limit the use of underscore columns such as `_key`. A
|
||||
workaround would be to leverage Bigtable [Logical
|
||||
Views][bigtable-logical-view] to rename the columns.
|
||||
|
||||
[bigtable-studio]: https://cloud.google.com/bigtable/docs/manage-data-using-console
|
||||
[bigtable-logical-view]: https://cloud.google.com/bigtable/docs/create-manage-logical-views
|
||||
[bigtable-querybuilder]: https://cloud.google.com/bigtable/docs/query-builder
|
||||
@@ -1,70 +0,0 @@
|
||||
---
|
||||
title: "couchbase-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "couchbase-sql" tool executes a pre-defined SQL statement against a Couchbase
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `couchbase-sql` tool executes a pre-defined SQL statement against a Couchbase
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [couchbase](../sources/couchbase.md)
|
||||
|
||||
The specified SQL statement is executed as a parameterized statement, and specified
|
||||
parameters will be used according to their name: e.g. `$id`.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_products_by_category:
|
||||
kind: couchbase-sql
|
||||
source: my-couchbase-instance
|
||||
statement: |
|
||||
SELECT p.name, p.price, p.description
|
||||
FROM products p
|
||||
WHERE p.category = $category AND p.price < $max_price
|
||||
ORDER BY p.price DESC
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get a list of products for a specific category under a maximum price.
|
||||
Takes a category name, e.g. "Electronics" and a maximum price e.g 500 and returns a list of product names, prices, and descriptions.
|
||||
Do NOT use this tool with invalid category names. Do NOT guess a category name, Do NOT guess a price.
|
||||
Example:
|
||||
{{
|
||||
"category": "Electronics",
|
||||
"max_price": 500
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"category": "Furniture",
|
||||
"max_price": 1000
|
||||
}}
|
||||
parameters:
|
||||
- name: category
|
||||
type: string
|
||||
description: Product category name
|
||||
- name: max_price
|
||||
type: integer
|
||||
description: Maximum price (positive integer)
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "couchbase-sql". |
|
||||
| source | string | true | Name of the source the SQL query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the SQL statement. |
|
||||
| authRequired| array[string] | false | List of auth services that are required to use this tool. |
|
||||
7
docs/en/resources/tools/couchbase/_index.md
Normal file
7
docs/en/resources/tools/couchbase/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Couchbase"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Couchbase Sources.
|
||||
---
|
||||
100
docs/en/resources/tools/couchbase/couchbase-sql.md
Normal file
100
docs/en/resources/tools/couchbase/couchbase-sql.md
Normal file
@@ -0,0 +1,100 @@
|
||||
---
|
||||
title: "couchbase-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "couchbase-sql" tool executes a pre-defined SQL statement against a Couchbase
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/couchbase-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `couchbase-sql` tool executes a pre-defined SQL statement against a Couchbase
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [couchbase](../sources/couchbase.md)
|
||||
|
||||
The specified SQL statement is executed as a parameterized statement, and specified
|
||||
parameters will be used according to their name: e.g. `$id`.
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_products_by_category:
|
||||
kind: couchbase-sql
|
||||
source: my-couchbase-instance
|
||||
statement: |
|
||||
SELECT p.name, p.price, p.description
|
||||
FROM products p
|
||||
WHERE p.category = $category AND p.price < $max_price
|
||||
ORDER BY p.price DESC
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get a list of products for a specific category under a maximum price.
|
||||
Takes a category name, e.g. "Electronics", and a maximum price, e.g. 500, and returns a list of product names, prices, and descriptions.
|
||||
Do NOT use this tool with invalid category names. Do NOT guess a category name, Do NOT guess a price.
|
||||
Example:
|
||||
{{
|
||||
"category": "Electronics",
|
||||
"max_price": 500
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"category": "Furniture",
|
||||
"max_price": 1000
|
||||
}}
|
||||
parameters:
|
||||
- name: category
|
||||
type: string
|
||||
description: Product category name
|
||||
- name: max_price
|
||||
type: integer
|
||||
description: Maximum price (positive integer)
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: couchbase-sql
|
||||
source: my-couchbase-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "couchbase-sql". |
|
||||
| source | string | true | Name of the source the SQL query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
|
||||
7
docs/en/resources/tools/dgraph/_index.md
Normal file
7
docs/en/resources/tools/dgraph/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Dgraph"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Dgraph Sources.
|
||||
---
|
||||
@@ -5,6 +5,8 @@ weight: 1
|
||||
description: >
|
||||
A "dgraph-dql" tool executes a pre-defined DQL statement against a Dgraph
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/dgraph-dql
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -117,6 +119,6 @@ tools:
|
||||
| source | string | true | Name of the source the dql query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | dql statement to execute |
|
||||
| isQuery | boolean | false | To run statement as query set true otherwise false |
|
||||
| timeout | string | false | To set timeout for query |
|
||||
| isQuery | boolean | false | To run statement as query set true otherwise false |
|
||||
| timeout | string | false | To set timeout for query |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the dql statement. |
|
||||
7
docs/en/resources/tools/http/_index.md
Normal file
7
docs/en/resources/tools/http/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "HTTP"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with HTTP Sources.
|
||||
---
|
||||
@@ -4,20 +4,31 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "http" tool sends out an HTTP request to an HTTP endpoint.
|
||||
aliases:
|
||||
- /resources/tools/http
|
||||
---
|
||||
|
||||
|
||||
## About
|
||||
|
||||
The `http` tool allows you to make HTTP requests to APIs to retrieve data.
|
||||
An HTTP request is the method by which a client communicates with a server to retrieve or manipulate resources.
|
||||
Toolbox allows you to configure the request URL, method, headers, query parameters, and the request body for an HTTP Tool.
|
||||
An HTTP request is the method by which a client communicates with a server to
|
||||
retrieve or manipulate resources.
|
||||
Toolbox allows you to configure the request URL, method, headers, query
|
||||
parameters, and the request body for an HTTP Tool.
|
||||
|
||||
### URL
|
||||
|
||||
An HTTP request URL identifies the target the client wants to access.
|
||||
Toolbox composes the request URL from the HTTP Source's `baseUrl` and the HTTP Tool's `path`.
|
||||
For example, the following config allows you to reach different paths of the same server using multiple Tools:
|
||||
Toolbox composes the request URL from 3 places:
|
||||
|
||||
1. The HTTP Source's `baseUrl`.
|
||||
2. The HTTP Tool's `path` field.
|
||||
3. The HTTP Tool's `pathParams` for dynamic path composed during Tool
|
||||
invocation.
|
||||
|
||||
For example, the following config allows you to reach different paths of the
|
||||
same server using multiple Tools:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
@@ -39,16 +50,31 @@ tools:
|
||||
method: GET
|
||||
path: /search
|
||||
description: Tool to search information from the example API
|
||||
|
||||
my-dynamic-path-tool:
|
||||
kind: http
|
||||
source: my-http-source
|
||||
method: GET
|
||||
path: /{{.myPathParam}}/search
|
||||
description: Tool to reach endpoint based on the input to `myPathParam`
|
||||
pathParams:
|
||||
- name: myPathParam
|
||||
type: string
|
||||
description: The dynamic path parameter
|
||||
|
||||
```
|
||||
|
||||
### Headers
|
||||
|
||||
An HTTP request header is a key-value pair sent by a client to a server, providing additional information about the request, such as the client's preferences, the request body content type, and other metadata.
|
||||
Headers specified by the HTTP Tool are combined with its HTTP Source headers for the resulting HTTP request, and override the Source headers in case of conflict.
|
||||
An HTTP request header is a key-value pair sent by a client to a server,
|
||||
providing additional information about the request, such as the client's
|
||||
preferences, the request body content type, and other metadata.
|
||||
Headers specified by the HTTP Tool are combined with its HTTP Source headers for
|
||||
the resulting HTTP request, and override the Source headers in case of conflict.
|
||||
The HTTP Tool allows you to specify headers in two different ways:
|
||||
|
||||
- Static headers can be specified using the `headers` field, and will be the same for every invocation:
|
||||
- Static headers can be specified using the `headers` field, and will be the
|
||||
same for every invocation:
|
||||
|
||||
```yaml
|
||||
my-http-tool:
|
||||
@@ -62,7 +88,9 @@ my-http-tool:
|
||||
Content-Type: application/json
|
||||
```
|
||||
|
||||
- Dynamic headers can be specified as parameters in the `headerParams` field. The `name` of the `headerParams` will be used as the header key, and the value is determined by the LLM input upon Tool invocation:
|
||||
- Dynamic headers can be specified as parameters in the `headerParams` field.
|
||||
The `name` of the `headerParams` will be used as the header key, and the value
|
||||
is determined by the LLM input upon Tool invocation:
|
||||
|
||||
```yaml
|
||||
my-http-tool:
|
||||
@@ -79,9 +107,12 @@ my-http-tool:
|
||||
|
||||
### Query parameters
|
||||
|
||||
Query parameters are key-value pairs appended to a URL after a question mark (?) to provide additional information to the server for processing the request, like filtering or sorting data.
|
||||
Query parameters are key-value pairs appended to a URL after a question mark (?)
|
||||
to provide additional information to the server for processing the request, like
|
||||
filtering or sorting data.
|
||||
|
||||
- Static request query parameters should be specified in the `path` as part of the URL itself:
|
||||
- Static request query parameters should be specified in the `path` as part of
|
||||
the URL itself:
|
||||
|
||||
```yaml
|
||||
my-http-tool:
|
||||
@@ -92,7 +123,8 @@ my-http-tool:
|
||||
description: Tool to search for item with ID 1 in English
|
||||
```
|
||||
|
||||
- Dynamic request query parameters should be specified as parameters in the `queryParams` section:
|
||||
- Dynamic request query parameters should be specified as parameters in the
|
||||
`queryParams` section:
|
||||
|
||||
```yaml
|
||||
my-http-tool:
|
||||
@@ -109,8 +141,11 @@ my-http-tool:
|
||||
|
||||
### Request body
|
||||
|
||||
The request body payload is a string that supports parameter replacement following [Go template][go-template-doc]'s annotations.
|
||||
The parameter names in the `requestBody` should be preceded by "." and enclosed by double curly brackets "{{}}". The values will be populated into the request body payload upon Tool invocation.
|
||||
The request body payload is a string that supports parameter replacement
|
||||
following [Go template][go-template-doc]'s annotations.
|
||||
The parameter names in the `requestBody` should be preceded by "." and enclosed
|
||||
by double curly brackets "{{}}". The values will be populated into the request
|
||||
body payload upon Tool invocation.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -137,14 +172,17 @@ my-http-tool:
|
||||
|
||||
#### Formatting Parameters
|
||||
|
||||
Some complex parameters (such as arrays) may require additional formatting to match the expected output. For convenience, you can specify one of the following pre-defined functions before the parameter name to format it:
|
||||
Some complex parameters (such as arrays) may require additional formatting to
|
||||
match the expected output. For convenience, you can specify one of the following
|
||||
pre-defined functions before the parameter name to format it:
|
||||
|
||||
##### JSON
|
||||
|
||||
The `json` keyword converts a parameter into a JSON format.
|
||||
|
||||
{{< notice note >}}
|
||||
Using JSON may add quotes to the variable name for certain types (such as strings).
|
||||
Using JSON may add quotes to the variable name for certain types (such as
|
||||
strings).
|
||||
{{< /notice >}}
|
||||
|
||||
Example:
|
||||
@@ -1,81 +0,0 @@
|
||||
---
|
||||
title: "mssql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mssql-sql" tool executes a pre-defined SQL statement against a SQL Server
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mssql-sql` tool executes a pre-defined SQL statement against a SQL Server
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mssql](../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../sources/mssql.md)
|
||||
|
||||
Toolbox supports the [prepare statement syntax][prepare-statement] of MS SQL
|
||||
Server and expects parameters in the SQL query to be in the form of either
|
||||
`@Name` or `@p1` to `@pN` (ordinal position).
|
||||
|
||||
```go
|
||||
db.QueryContext(ctx, `select * from t where ID = @ID and Name = @p2;`, sql.Named("ID", 6), "Bob")
|
||||
```
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
[prepare-statement]: https://learn.microsoft.com/sql/relational-databases/system-stored-procedures/sp-prepare-transact-sql?view=sql-server-ver16
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: mssql-sql
|
||||
source: my-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = @airline
|
||||
AND flight_number = @flight_number
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator followed by a flight number, which is a 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mssql-sql". |
|
||||
| source | string | true | Name of the source the T-SQL statement should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
7
docs/en/resources/tools/mssql/_index.md
Normal file
7
docs/en/resources/tools/mssql/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "SQL Server"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with SQL Server Sources, such as CloudSQL for SQL Server.
|
||||
---
|
||||
@@ -5,6 +5,8 @@ weight: 1
|
||||
description: >
|
||||
A "mssql-execute-sql" tool executes a SQL statement against a SQL Server
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/mssql-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -30,6 +32,7 @@ tools:
|
||||
source: my-mssql-instance
|
||||
description: Use this tool to execute sql statement.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
111
docs/en/resources/tools/mssql/mssql-sql.md
Normal file
111
docs/en/resources/tools/mssql/mssql-sql.md
Normal file
@@ -0,0 +1,111 @@
|
||||
---
|
||||
title: "mssql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mssql-sql" tool executes a pre-defined SQL statement against a SQL Server
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/mssql-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mssql-sql` tool executes a pre-defined SQL statement against a SQL Server
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mssql](../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../sources/mssql.md)
|
||||
|
||||
Toolbox supports the [prepare statement syntax][prepare-statement] of MS SQL
|
||||
Server and expects parameters in the SQL query to be in the form of either
|
||||
`@Name` or `@p1` to `@pN` (ordinal position).
|
||||
|
||||
```go
|
||||
db.QueryContext(ctx, `select * from t where ID = @ID and Name = @p2;`, sql.Named("ID", 6), "Bob")
|
||||
```
|
||||
|
||||
[prepare-statement]: https://learn.microsoft.com/sql/relational-databases/system-stored-procedures/sp-prepare-transact-sql?view=sql-server-ver16
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: mssql-sql
|
||||
source: my-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = @airline
|
||||
AND flight_number = @flight_number
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator followed by a flight number, which is a 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: mssql-sql
|
||||
source: my-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mssql-sql". |
|
||||
| source | string | true | Name of the source the T-SQL statement should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
@@ -1,76 +0,0 @@
|
||||
---
|
||||
title: "mysql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mysql-sql" tool executes a pre-defined SQL statement against a MySQL
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mysql-sql` tool executes a pre-defined SQL statement against a MySQL
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mysql](../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../sources/mysql.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][mysql-prepare],
|
||||
and expects parameters in the SQL query to be in the form of placeholders `?`.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
[mysql-prepare]: https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: mysql-sql
|
||||
source: my-mysql-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = ?
|
||||
AND flight_number = ?
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator followed by a flight number, which is a 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mysql-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
7
docs/en/resources/tools/mysql/_index.md
Normal file
7
docs/en/resources/tools/mysql/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "MySQL"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with MySQL Sources, such as Cloud SQL for MySQL.
|
||||
---
|
||||
@@ -5,6 +5,8 @@ weight: 1
|
||||
description: >
|
||||
A "mysql-execute-sql" tool executes a SQL statement against a MySQL
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/mysql-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -35,6 +37,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mysql-execute-sql". |
|
||||
| kind | string | true | Must be "mysql-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
106
docs/en/resources/tools/mysql/mysql-sql.md
Normal file
106
docs/en/resources/tools/mysql/mysql-sql.md
Normal file
@@ -0,0 +1,106 @@
|
||||
---
|
||||
title: "mysql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mysql-sql" tool executes a pre-defined SQL statement against a MySQL
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/mysql-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mysql-sql` tool executes a pre-defined SQL statement against a MySQL
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mysql](../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../sources/mysql.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][mysql-prepare],
|
||||
and expects parameters in the SQL query to be in the form of placeholders `?`.
|
||||
|
||||
[mysql-prepare]: https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: mysql-sql
|
||||
source: my-mysql-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = ?
|
||||
AND flight_number = ?
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator followed by a flight number, which is a 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: mysql-sql
|
||||
source: my-mysql-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mysql-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
7
docs/en/resources/tools/neo4j/_index.md
Normal file
7
docs/en/resources/tools/neo4j/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Neo4j"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Neo4j Sources.
|
||||
---
|
||||
@@ -5,6 +5,8 @@ weight: 1
|
||||
description: >
|
||||
A "neo4j-cypher" tool executes a pre-defined cypher statement against a Neo4j
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/neo4j-cypher
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -1,79 +0,0 @@
|
||||
---
|
||||
title: "postgres-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "postgres-sql" tool executes a pre-defined SQL statement against a Postgres
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `postgres-sql` tool executes a pre-defined SQL statement against a Postgres
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../sources/cloud-sql-pg.md)
|
||||
- [postgres](../sources/postgres.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][pg-prepare],
|
||||
and specified parameters will be inserted according to their position: e.g. `$1`
|
||||
will be the first parameter specified, `$2` will be the second parameter, and so
|
||||
on.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
[pg-prepare]: https://www.postgresql.org/docs/current/sql-prepare.html
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = $1
|
||||
AND flight_number = $2
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator and followed by flight number, which is 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
7
docs/en/resources/tools/postgres/_index.md
Normal file
7
docs/en/resources/tools/postgres/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Postgres"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Postgres Sources, such as Cloud SQL for Postgres and AlloyDB.
|
||||
---
|
||||
@@ -5,6 +5,8 @@ weight: 1
|
||||
description: >
|
||||
A "postgres-execute-sql" tool executes a SQL statement against a Postgres
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -36,6 +38,6 @@ tools:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
110
docs/en/resources/tools/postgres/postgres-sql.md
Normal file
110
docs/en/resources/tools/postgres/postgres-sql.md
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
title: "postgres-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "postgres-sql" tool executes a pre-defined SQL statement against a Postgres
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `postgres-sql` tool executes a pre-defined SQL statement against a Postgres
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../sources/cloud-sql-pg.md)
|
||||
- [postgres](../sources/postgres.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][pg-prepare],
|
||||
and specified parameters will be inserted according to their position: e.g. `$1`
|
||||
will be the first parameter specified, `$2` will be the second parameter, and so
|
||||
on. If template parameters are included, they will be resolved before execution
|
||||
of the prepared statement.
|
||||
|
||||
[pg-prepare]: https://www.postgresql.org/docs/current/sql-prepare.html
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = $1
|
||||
AND flight_number = $2
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code is a code for an airline service consisting of a two-character
|
||||
airline designator and followed by flight number, which is 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}}
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|---------------------|:---------------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
7
docs/en/resources/tools/redis/_index.md
Normal file
7
docs/en/resources/tools/redis/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Redis"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Redis Sources.
|
||||
---
|
||||
58
docs/en/resources/tools/redis/redis.md
Normal file
58
docs/en/resources/tools/redis/redis.md
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
title: "redis"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "redis" tool executes a set of pre-defined Redis commands against a Redis instance.
|
||||
aliases:
|
||||
- /resources/tools/redis
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A redis tool executes a series of pre-defined Redis commands against a
|
||||
Redis source.
|
||||
|
||||
The specified Redis commands are executed sequentially. Each command is
|
||||
represented as a string list, where the first element is the command name (e.g.,
|
||||
SET, GET, HGETALL) and subsequent elements are its arguments.
|
||||
|
||||
### Dynamic Command Parameters
|
||||
|
||||
Command arguments can be templated using the `$variableName` annotation. The
|
||||
array type parameters will be expanded once into multiple arguments. Take the
|
||||
following config for example:
|
||||
|
||||
```yaml
|
||||
commands:
|
||||
- [SADD, userNames, $userNames] # Array will be flattened into multiple arguments.
|
||||
parameters:
|
||||
- name: userNames
|
||||
type: array
|
||||
description: The user names to be set.
|
||||
```
|
||||
|
||||
If the input is an array of strings `["Alice", "Sid", "Bob"]`, the final command
|
||||
to be executed after argument expansion will be `[SADD, userNames, Alice, Sid, Bob]`.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
user_data_tool:
|
||||
kind: redis
|
||||
source: my-redis-instance
|
||||
description: |
|
||||
Use this tool to interact with user data stored in Redis.
|
||||
It can set, retrieve, and delete user-specific information.
|
||||
commands:
|
||||
- [SADD, userNames, $userNames] # Array will be flattened into multiple arguments.
|
||||
- [GET, $userId]
|
||||
parameters:
|
||||
- name: userId
|
||||
type: string
|
||||
description: The unique identifier for the user.
|
||||
- name: userNames
|
||||
type: array
|
||||
description: The user names to be set.
|
||||
```
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user