Compare commits


6 Commits

Author        SHA1        Message                              Date
Averi Kitsch  b2cbb4098b  debug                                2025-08-05 11:05:39 -07:00
Yuan Teoh     4c177198c9  update Dockerfile                    2025-08-05 09:57:04 -07:00
Averi Kitsch  618ae82176  debug                                2025-08-05 09:54:07 -07:00
Yuan Teoh     9b11a59519  update continuous release            2025-08-05 09:42:38 -07:00
Averi Kitsch  f4219dc00a  Merge branch 'main' into fix-ci      2025-08-05 09:12:48 -07:00
Averi Kitsch  10f79b7a97  ci: enable C++ compiler for duckDB   2025-08-05 08:53:37 -07:00
339 changed files with 4481 additions and 23886 deletions

View File

@@ -14,116 +14,117 @@
steps:
- id: "build-docker"
name: "gcr.io/cloud-builders/docker"
waitFor: ['-']
waitFor: ["-"]
script: |
#!/usr/bin/env bash
docker buildx create --name container-builder --driver docker-container --bootstrap --use
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
#!/usr/bin/env bash
docker buildx create --name container-builder --driver docker-container --bootstrap --use
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
- id: "install-dependencies"
name: golang:1
waitFor: ['-']
name: golang:1-bookworm
waitFor: ["-"]
env:
- 'GOPATH=/gopath'
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
go get -d ./...
apt-get install -y clang
go get -d ./...
- id: "build-linux-amd64"
name: golang:1
waitFor:
name: golang:1-bookworm
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
#!/usr/bin/env bash
CGO_ENABLED=1 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-linux-amd64"
script: |
#!/usr/bin/env bash
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$REF_NAME/linux/amd64/toolbox
#!/usr/bin/env bash
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$REF_NAME/linux/amd64/toolbox
- id: "build-darwin-arm64"
name: golang:1
waitFor:
name: golang:1-bookworm
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
#!/usr/bin/env bash
CGO_ENABLED=1 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-arm64"
script: |
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$REF_NAME/darwin/arm64/toolbox
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$REF_NAME/darwin/arm64/toolbox
- id: "build-darwin-amd64"
name: golang:1
waitFor:
name: golang:1-bookworm
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
#!/usr/bin/env bash
CGO_ENABLED=1 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-amd64"
script: |
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$REF_NAME/darwin/amd64/toolbox
#!/usr/bin/env bash
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$REF_NAME/darwin/amd64/toolbox
- id: "build-windows-amd64"
name: golang:1
waitFor:
name: golang:1-bookworm
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
#!/usr/bin/env bash
CGO_ENABLED=1 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-windows-amd64"
script: |
#!/usr/bin/env bash
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$REF_NAME/windows/amd64/toolbox.exe
#!/usr/bin/env bash
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$REF_NAME/windows/amd64/toolbox.exe
options:
automapSubstitutions: true
dynamicSubstitutions: true
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
machineType: 'E2_HIGHCPU_32'
machineType: "E2_HIGHCPU_32"
substitutions:
_REGION: us-central1
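
Every `build-*` step above bakes the commit SHA into the binary at link time via `go build -ldflags "-X ..."`. For reference, a minimal sketch of the receiving side, inferred from the flag's import path (`github.com/googleapis/genai-toolbox/cmd.commitSha`); the default value and helper function are illustrative, not the repository's actual code:

```go
// Package cmd holds build metadata. The release pipeline overrides
// commitSha at link time with:
//   go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)"
package cmd

import "fmt"

// commitSha is set via -ldflags "-X"; the "dev" default is illustrative.
var commitSha = "dev"

// versionString is a hypothetical helper showing how the injected value
// would typically surface.
func versionString() string {
	return fmt.Sprintf("toolbox (commit %s)", commitSha)
}
```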

View File

@@ -62,6 +62,7 @@ steps:
postgressql \
postgresexecutesql
- id: "alloydb-pg"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -120,7 +121,8 @@ steps:
- "BIGTABLE_PROJECT=$PROJECT_ID"
- "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
secretEnv:
["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -151,7 +153,7 @@ steps:
"BigQuery" \
bigquery \
bigquery
- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -272,7 +274,8 @@ steps:
- "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_MYSQL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
secretEnv:
["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -384,7 +387,7 @@ steps:
sqlite
- id: "couchbase"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -392,8 +395,7 @@ steps:
- "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
- "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
secretEnv: ["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -406,7 +408,7 @@ steps:
couchbase
- id: "redis"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -423,9 +425,9 @@ steps:
"Redis" \
redis \
redis
- id: "valkey"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -444,27 +446,6 @@ steps:
valkey \
valkey
- id: "oceanbase"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "OCEANBASE_PORT=$_OCEANBASE_PORT"
- "OCEANBASE_DATABASE=$_OCEANBASE_DATABASE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"OceanBase" \
oceanbase \
oceanbase
- id: "firestore"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -494,13 +475,7 @@ steps:
- "FIRESTORE_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
secretEnv:
[
"CLIENT_ID",
"LOOKER_BASE_URL",
"LOOKER_CLIENT_ID",
"LOOKER_CLIENT_SECRET",
]
secretEnv: ["CLIENT_ID", "LOOKER_BASE_URL", "LOOKER_CLIENT_ID", "LOOKER_CLIENT_SECRET"]
volumes:
- name: "go"
path: "/gopath"
@@ -512,6 +487,25 @@ steps:
looker \
looker
- id: "duckdb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
volumes:
- name: "go"
path: "/gopath"
secretEnv: ["CLIENT_ID"]
args:
- -c
- |
.ci/test_with_coverage.sh \
"DuckDB" \
duckdb \
duckdb
- id: "alloydbwaitforoperation"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -551,75 +545,8 @@ steps:
.ci/test_with_coverage.sh \
"TiDB" \
tidb \
tidbsql tidbexecutesql
- id: "firebird"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIREBIRD_DATABASE=$_FIREBIRD_DATABASE_NAME"
- "FIREBIRD_HOST=$_FIREBIRD_HOST"
- "FIREBIRD_PORT=$_FIREBIRD_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "FIREBIRD_USER", "FIREBIRD_PASS"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Firebird" \
firebird \
firebirdsql firebirdexecutesql
- id: "clickhouse"
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLICKHOUSE_DATABASE=$_CLICKHOUSE_DATABASE"
- "CLICKHOUSE_PORT=$_CLICKHOUSE_PORT"
- "CLICKHOUSE_PROTOCOL=$_CLICKHOUSE_PROTOCOL"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLICKHOUSE_HOST", "CLICKHOUSE_USER", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"ClickHouse" \
clickhouse \
clickhouse
- id: "trino"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "TRINO_HOST=$_TRINO_HOST"
- "TRINO_PORT=$_TRINO_PORT"
- "TRINO_CATALOG=$_TRINO_CATALOG"
- "TRINO_SCHEMA=$_TRINO_SCHEMA"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "TRINO_USER"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Trino" \
trino \
trinosql trinoexecutesql
tidbsql tidbexecutesql
availableSecrets:
secretManager:
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -672,32 +599,16 @@ availableSecrets:
env: REDIS_PASS
- versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
env: VALKEY_ADDRESS
- versionName: projects/$PROJECT_ID/secrets/looker_base_url/versions/latest
- versionName: projects/107716898620/secrets/looker_base_url/versions/latest
env: LOOKER_BASE_URL
- versionName: projects/$PROJECT_ID/secrets/looker_client_id/versions/latest
- versionName: projects/107716898620/secrets/looker_client_id/versions/latest
env: LOOKER_CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/looker_client_secret/versions/latest
- versionName: projects/107716898620/secrets/looker_client_secret/versions/latest
env: LOOKER_CLIENT_SECRET
- versionName: projects/$PROJECT_ID/secrets/tidb_user/versions/latest
- versionName: projects/107716898620/secrets/tidb_user/versions/latest
env: TIDB_USER
- versionName: projects/$PROJECT_ID/secrets/tidb_pass/versions/latest
- versionName: projects/107716898620/secrets/tidb_pass/versions/latest
env: TIDB_PASS
- versionName: projects/$PROJECT_ID/secrets/clickhouse_host/versions/latest
env: CLICKHOUSE_HOST
- versionName: projects/$PROJECT_ID/secrets/clickhouse_user/versions/latest
env: CLICKHOUSE_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_user/versions/latest
env: FIREBIRD_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_pass/versions/latest
env: FIREBIRD_PASS
- versionName: projects/$PROJECT_ID/secrets/trino_user/versions/latest
env: TRINO_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_host/versions/latest
env: OCEANBASE_HOST
- versionName: projects/$PROJECT_ID/secrets/oceanbase_user/versions/latest
env: OCEANBASE_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
env: OCEANBASE_PASSWORD
options:
logging: CLOUD_LOGGING_ONLY
@@ -709,7 +620,6 @@ options:
substitutions:
_DATABASE_NAME: test_database
_FIREBIRD_DATABASE_NAME: /firebird/test_database.fdb
_REGION: "us-central1"
_CLOUD_SQL_POSTGRES_INSTANCE: "cloud-sql-pg-testing"
_ALLOYDB_POSTGRES_CLUSTER: "alloydb-pg-testing"
@@ -733,14 +643,3 @@ substitutions:
_LOOKER_VERIFY_SSL: "true"
_TIDB_HOST: 127.0.0.1
_TIDB_PORT: "4000"
_CLICKHOUSE_DATABASE: "default"
_CLICKHOUSE_PORT: "8123"
_CLICKHOUSE_PROTOCOL: "http"
_FIREBIRD_HOST: 127.0.0.1
_FIREBIRD_PORT: "3050"
_TRINO_HOST: 127.0.0.1
_TRINO_PORT: "8080"
_TRINO_CATALOG: "memory"
_TRINO_SCHEMA: "default"
_OCEANBASE_PORT: "2883"
_OCEANBASE_DATABASE: "oceanbase"

View File

@@ -14,130 +14,130 @@
steps:
- id: "build-docker"
name: "gcr.io/cloud-builders/docker"
waitFor: ['-']
waitFor: ["-"]
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
docker buildx create --name container-builder --driver docker-container --bootstrap --use
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
docker buildx create --name container-builder --driver docker-container --bootstrap --use
export TAGS="-t ${_DOCKER_URI}:$VERSION"
if [[ $_PUSH_LATEST == 'true' ]]; then
export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
fi
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
export TAGS="-t ${_DOCKER_URI}:$VERSION"
if [[ $_PUSH_LATEST == 'true' ]]; then
export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
fi
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
- id: "install-dependencies"
name: golang:1
waitFor: ['-']
env:
- 'GOPATH=/gopath'
waitFor: ["-"]
env:
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
go get -d ./...
go get -d ./...
- id: "build-linux-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
env:
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=1 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-linux-amd64"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox
- id: "build-darwin-arm64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
env:
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=1 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-arm64"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
- id: "build-darwin-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
env:
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=1 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-amd64"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
- id: "build-windows-amd64"
name: golang:1
waitFor:
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
env:
- "GOPATH=/gopath"
volumes:
- name: 'go'
path: '/gopath'
- name: "go"
path: "/gopath"
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=1 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-windows-amd64"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
options:
automapSubstitutions: true
dynamicSubstitutions: true
logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
machineType: 'E2_HIGHCPU_32'
machineType: "E2_HIGHCPU_32"
substitutions:
_REGION: us-central1

View File

@@ -1,21 +0,0 @@
## Description
---
> Should include a concise description of the changes (bug or feature), its
> impact, along with a summary of the solution
## PR Checklist
---
> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:
- [ ] Make sure you reviewed
[CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
[bug/issue](https://github.com/googleapis/langchain-google-alloydb-pg-python/issues/new/choose)
before writing your code! That way we can discuss the change, evaluate
designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change
🛠️ Fixes #<issue_number_goes_here>

View File

@@ -9,10 +9,6 @@ assign_issues_by:
- Genesis929
- shobsi
- jiaxunwu
- labels:
- 'product: looker'
to:
- drstrangelooker
assign_prs:
- Yuan325
- duwenxin99

View File

@@ -20,5 +20,3 @@ sourceFileExtensions:
- 'go'
- 'yaml'
- 'yml'
ignoreFiles:
- 'docs/en/getting-started/quickstart/**'

.github/labels.yaml vendored
View File

@@ -84,15 +84,7 @@
color: 8befd7
description: 'Status: waiting for feedback from community or issue author.'
- name: 'status: waiting for response'
color: 8befd7
description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'
# Product Labels
- name: 'product: bigquery'
color: 5065c7
description: 'Product: Assigned to the BigQuery team.'
# Product Labels
- name: 'product: looker'
color: 5065c7
description: 'Product: Assigned to the Looker team.'

View File

@@ -24,7 +24,6 @@ extraFiles: [
"docs/en/getting-started/local_quickstart_js.md",
"docs/en/getting-started/local_quickstart_go.md",
"docs/en/getting-started/mcp_quickstart/_index.md",
"docs/en/samples/alloydb/_index.md",
"docs/en/samples/bigquery/local_quickstart.md",
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
@@ -38,8 +37,6 @@ extraFiles: [
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/firestore_mcp.md",
"docs/en/how-to/connect-ide/looker_mcp.md",
"docs/en/how-to/connect-ide/mysql_mcp.md",
"docs/en/how-to/connect-ide/mssql_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/spanner_mcp.md",
]

View File

@@ -39,7 +39,7 @@ jobs:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
@@ -55,7 +55,7 @@ jobs:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: gh-pages

View File

@@ -49,24 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- name: Remove PR label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'docs: deploy-preview' }}"
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
name: 'docs: deploy-preview',
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.pull_request.number
});
} catch (e) {
console.log('Failed to remove label. Another job may have already removed it!');
}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
@@ -84,7 +67,7 @@ jobs:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -115,4 +98,4 @@ jobs:
owner: context.repo.owner,
repo: context.repo.repo,
body: "🔎 Preview at https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/"
})
})

View File

@@ -55,7 +55,7 @@ jobs:
with:
go-version: "1.22"
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

View File

@@ -26,4 +26,4 @@ jobs:
contents: 'read'
uses: ./.github/workflows/cloud_build_failure_reporter.yml
with:
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge,toolbox-continuous-release"
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge"

View File

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -16,7 +16,7 @@ name: tests
on:
push:
branches:
- "main"
- 'main'
pull_request:
pull_request_target:
types: [labeled]
@@ -35,9 +35,9 @@ jobs:
os: [macos-latest, windows-latest, ubuntu-latest]
fail-fast: false
permissions:
contents: "read"
issues: "write"
pull-requests: "write"
contents: 'read'
issues: 'write'
pull-requests: 'write'
steps:
- name: Remove PR label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
@@ -62,7 +62,7 @@ jobs:
go-version: "1.22"
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -76,8 +76,6 @@ jobs:
- name: Run tests with coverage
if: ${{ runner.os == 'Linux' }}
env:
GOTOOLCHAIN: go1.25.0+auto
run: |
source_dir="./internal/sources/*"
tool_dir="./internal/tools/*"

.gitignore vendored
View File

@@ -20,4 +20,4 @@ node_modules
# executable
genai-toolbox
toolbox
toolbox

View File

@@ -8,8 +8,6 @@ defaultContentLanguageInSubdir = false
enableGitInfo = true
enableRobotsTXT = true
ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quickstart/go"]
[languages]
[languages.en]
languageName ="English"

View File

@@ -1,47 +0,0 @@
{{ $notebookFile := .Get 0 }}
{{ with .Page.Resources.Get $notebookFile }}
{{ $content := .Content | transform.Unmarshal }}
{{ range $content.cells }}
{{ if eq .cell_type "markdown" }}
<div class="notebook-markdown">
{{ $markdown := "" }}
{{ range .source }}{{ $markdown = print $markdown . }}{{ end }}
{{ $markdown | markdownify }}
</div>
{{ end }}
{{ if eq .cell_type "code" }}
<div class="notebook-code">
{{ $code := "" }}
{{ range .source }}{{ $code = print $code . }}{{ end }}
{{ highlight $code "python" "" }}
{{ range .outputs }}
<div class="notebook-output">
{{ with .text }}
<pre class="notebook-stream"><code>{{- range . }}{{ . }}{{ end -}}</code></pre>
{{ end }}
{{ with .data }}
{{ with index . "image/png" }}
<img src="data:image/png;base64,{{ . }}" alt="Notebook output image">
{{ end }}
{{ with index . "image/jpeg" }}
<img src="data:image/jpeg;base64,{{ . }}" alt="Notebook output image">
{{ end }}
{{ with index . "text/html" }}
{{ $html := "" }}
{{ range . }}{{ $html = print $html . }}{{ end }}
{{ $html | safeHTML }}
{{ end }}
{{ end }}
</div>
{{ end }}
</div>
{{ end }}
{{ end }}
{{ else }}
<p style="color: red;">Error: Notebook '{{ $notebookFile }}' not found in page resources.</p>
{{ end }}
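
The deleted shortcode above walks an `.ipynb` file's `cells` array, joining each cell's `source` fragments before rendering markdown or highlighted code. A small standalone Go sketch of the same traversal (the types and sample data are illustrative, not repository code):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// notebook models the minimal slice of the .ipynb JSON schema that the
// shortcode walks: a list of cells, each with a type and source fragments.
type notebook struct {
	Cells []struct {
		CellType string   `json:"cell_type"`
		Source   []string `json:"source"`
	} `json:"cells"`
}

func main() {
	raw := `{"cells":[{"cell_type":"markdown","source":["# Title\n"]},
	         {"cell_type":"code","source":["print(1)\n"]}]}`
	var nb notebook
	if err := json.Unmarshal([]byte(raw), &nb); err != nil {
		panic(err)
	}
	for _, c := range nb.Cells {
		// Like the shortcode, join the source fragments into one string.
		fmt.Printf("[%s]\n%s\n", c.CellType, strings.Join(c.Source, ""))
	}
}
```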

View File

@@ -1,49 +0,0 @@
{{/*
snippet.html
Usage:
{{< regionInclude "filename.md" "region_name" >}}
{{< regionInclude "filename.python" "region_name" "python" >}}
*/}}
{{ $file := .Get 0 }}
{{ $region := .Get 1 }}
{{ $lang := .Get 2 | default "text" }}
{{ $path := printf "%s%s" .Page.File.Dir $file }}
{{ if or (not $file) (eq $file "") }}
{{ errorf "The file parameter (first argument) is required and must be non-empty in %s" .Page.File.Path }}
{{ end }}
{{ if or (not $region) (eq $region "") }}
{{ errorf "The region parameter (second argument) is required and must be non-empty in %s" .Page.File.Path }}
{{ end }}
{{ if not (fileExists $path) }}
{{ errorf "File %q not found (referenced in %s)" $path .Page.File.Path }}
{{ end }}
{{ $content := readFile $path }}
{{ $start_tag := printf "[START %s]" $region }}
{{ $end_tag := printf "[END %s]" $region }}
{{ $snippet := "" }}
{{ $in_snippet := false }}
{{ range split $content "\n" }}
{{ if $in_snippet }}
{{ if in . $end_tag }}
{{ $in_snippet = false }}
{{ else }}
{{ $snippet = printf "%s%s\n" $snippet . }}
{{ end }}
{{ else if in . $start_tag }}
{{ $in_snippet = true }}
{{ end }}
{{ end }}
{{ if eq (trim $snippet "") "" }}
{{ errorf "Region %q not found or empty in file %s (referenced in %s)" $region $file .Page.File.Path }}
{{ end }}
{{ if eq $lang "text" }}
{{ $snippet | markdownify }}
{{ else }}
{{ highlight (trim $snippet "\n") $lang "" }}
{{ end }}
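
This second deleted shortcode pulls the lines between `[START region]` and `[END region]` markers out of a file. A standalone Go sketch of the same scan, e.g. for verifying region markers outside Hugo (`extractRegion` is a hypothetical helper, not repository code):

```go
package main

import (
	"fmt"
	"strings"
)

// extractRegion mirrors the shortcode: collect the lines strictly between
// the "[START region]" and "[END region]" markers.
func extractRegion(content, region string) (string, bool) {
	start := fmt.Sprintf("[START %s]", region)
	end := fmt.Sprintf("[END %s]", region)
	var out []string
	in := false
	for _, line := range strings.Split(content, "\n") {
		switch {
		case in && strings.Contains(line, end):
			return strings.Join(out, "\n"), true
		case in:
			out = append(out, line)
		case strings.Contains(line, start):
			in = true
		}
	}
	return "", false
}

func main() {
	src := "intro\n# [START hello]\nprint('hi')\n# [END hello]\n"
	if snippet, ok := extractRegion(src, "hello"); ok {
		fmt.Println(snippet)
	}
}
```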

View File

@@ -1,62 +1,6 @@
# Changelog
## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)
### ⚠ BREAKING CHANGES
* **prebuilt/alloydb:** Add bearer token support for alloydb-wait-for-operation ([#1183](https://github.com/googleapis/genai-toolbox/issues/1183))
### Features
* Add capability to set default for environment variable in config ([#1248](https://github.com/googleapis/genai-toolbox/issues/1248)) ([5bcd52e](https://github.com/googleapis/genai-toolbox/commit/5bcd52e7dcd0773ded723585f4abe29d044e1540))
* **firebird:** Add Firebird SQL 2.5+ source and tool ([#1011](https://github.com/googleapis/genai-toolbox/issues/1011)) ([4f6b806](https://github.com/googleapis/genai-toolbox/commit/4f6b806de947efc4e12bdb50dff7781aedb7b966))
* **oceanbase:** Add Oceanbase source and tool ([#895](https://github.com/googleapis/genai-toolbox/issues/895)) ([6fc4982](https://github.com/googleapis/genai-toolbox/commit/6fc49826d43f46c84028e752ebebddf3d94b3d13))
* **server/mcp:** Support `ping` mechanism ([#1178](https://github.com/googleapis/genai-toolbox/issues/1178)) ([5dcc66c](https://github.com/googleapis/genai-toolbox/commit/5dcc66c84fa72c75ec50a9ac5198018212ec2979))
* **server:** Fail-fast on environment variable substitution ([#1177](https://github.com/googleapis/genai-toolbox/issues/1177)) ([212aaba](https://github.com/googleapis/genai-toolbox/commit/212aaba74c8b431de8a5f7b9822a0af4afcaaa0e))
* **server:** Implement Tool call auth error propagation ([#1235](https://github.com/googleapis/genai-toolbox/issues/1235)) ([b94a021](https://github.com/googleapis/genai-toolbox/commit/b94a021ca11c6637cf8038449483b5e75f2012b3))
* **sources/bigquery:** Add support for user-credential passthrough ([#1067](https://github.com/googleapis/genai-toolbox/issues/1067)) ([650e2e2](https://github.com/googleapis/genai-toolbox/commit/650e2e26f51bff75ce66343f64944d0a89a58b69))
* **tool/looker:** Add support for `description` field in looker tool ([#1199](https://github.com/googleapis/genai-toolbox/issues/1199)) ([97f0dd2](https://github.com/googleapis/genai-toolbox/commit/97f0dd2acf26caf28ecad65abea8779c196a27f1))
* **tools/bigquery-ask-data-insights:** Add bigquery `ask-data-insights` tool ([#932](https://github.com/googleapis/genai-toolbox/issues/932)) ([7651357](https://github.com/googleapis/genai-toolbox/commit/7651357d424a2b6656d8b6818cebc5c8a86ed053))
* **tools/bigquery-forecast:** Add bigqueryforecast tool ([#1148](https://github.com/googleapis/genai-toolbox/issues/1148)) ([2ad0ccf](https://github.com/googleapis/genai-toolbox/commit/2ad0ccf83df542340087742468d6762f81eedee6))
* **tools/firestore-add-documents:** Add firestore-add-documents tool ([#1107](https://github.com/googleapis/genai-toolbox/issues/1107)) ([ee4a70a](https://github.com/googleapis/genai-toolbox/commit/ee4a70a0e82b346b07b5b4c60dfa060da2273f50))
* **tools/firestore-update-document:** Add firestore-update-document tool ([#1191](https://github.com/googleapis/genai-toolbox/issues/1191)) ([0010123](https://github.com/googleapis/genai-toolbox/commit/00101232a39c70288aac5715649c184858d351e3))
* **tools/looker:** Control over whether hidden objects are surfaced ([#1222](https://github.com/googleapis/genai-toolbox/issues/1222)) ([bc91559](https://github.com/googleapis/genai-toolbox/commit/bc91559cc4e5b20385b84cc562b624fabf7e47a8))
* **trino:** Add Trino source and tools ([#948](https://github.com/googleapis/genai-toolbox/issues/948)) ([7dd123b](https://github.com/googleapis/genai-toolbox/commit/7dd123b3d76b8eb2b74b5d960959c1f90684b37e))
### Bug Fixes
* **tools/looker:** Lookergetdashboards uses proper Authorized helper func ([#1255](https://github.com/googleapis/genai-toolbox/issues/1255)) ([00866bc](https://github.com/googleapis/genai-toolbox/commit/00866bc7fc33115c547213e60316ae889735fdbb))
* **tools/mongodb-find-one:** ProjectPayload unmarshaling ([#1167](https://github.com/googleapis/genai-toolbox/issues/1167)) ([8ea6a98](https://github.com/googleapis/genai-toolbox/commit/8ea6a98bd9096ba97722e5f807366887e864004f))
* **tools/mysql:** Fix encoded text for mysql ([#1161](https://github.com/googleapis/genai-toolbox/issues/1161)) ([a37cfa8](https://github.com/googleapis/genai-toolbox/commit/a37cfa841d151b9995d4fab73cfc5e4d30d2cc57)), closes [#840](https://github.com/googleapis/genai-toolbox/issues/840)
## [0.12.0](https://github.com/googleapis/genai-toolbox/compare/v0.11.0...v0.12.0) (2025-08-14)
### Features
* **prebuiltconfig:** Introduce additional parameter to limit context in list_tables ([#1151](https://github.com/googleapis/genai-toolbox/issues/1151)) ([497d3b1](https://github.com/googleapis/genai-toolbox/commit/497d3b126da252a4b59806ca2ca3c56e78efc13d))
* **prebuiltconfig/alloydb-admin:** Add list cluster, instance and users ([#1126](https://github.com/googleapis/genai-toolbox/issues/1126)) ([b42c139](https://github.com/googleapis/genai-toolbox/commit/b42c139158650fb1f3b696965e840c52e2016bf0))
* **prebuiltconfig/alloydb-admin:** Add tool to create user via Built in user type or IAM ([#1130](https://github.com/googleapis/genai-toolbox/issues/1130)) ([f5bcb9c](https://github.com/googleapis/genai-toolbox/commit/f5bcb9c755a2c1747d0beeda568b6217d7420e7a))
* **source/http:** Add User Agent to `http` invocations ([#1102](https://github.com/googleapis/genai-toolbox/issues/1102)) ([6f55b78](https://github.com/googleapis/genai-toolbox/commit/6f55b78e96b8c7aa9aca601cfae4d62f3e1eb42b))
* **sources/postgres:** Add support for `queryParams` ([#1047](https://github.com/googleapis/genai-toolbox/issues/1047)) ([7b57251](https://github.com/googleapis/genai-toolbox/commit/7b5725140279de21fece45e860945b7a7d23e7d0)), closes [#963](https://github.com/googleapis/genai-toolbox/issues/963)
* **tools/bigquery-execute-sql:** Add dry run support ([#1057](https://github.com/googleapis/genai-toolbox/issues/1057)) ([1cac9b5](https://github.com/googleapis/genai-toolbox/commit/1cac9b5b378153c7dc65ff3dfb4ebd852b715a10))
* **tools/dataplex-search-aspect-types:** Add support for `dataplex-search-aspect-types` tool ([#1061](https://github.com/googleapis/genai-toolbox/issues/1061)) ([d940187](https://github.com/googleapis/genai-toolbox/commit/d940187c851666cc201f519665fb4f2e1478465c))
* **tools/looker:** Add `looker-make-look` tool to create Looks ([#1099](https://github.com/googleapis/genai-toolbox/issues/1099)) ([61d9489](https://github.com/googleapis/genai-toolbox/commit/61d94893448f633a5f2b9d7f0744ab40704af824))
* **tools/looker:** Add visualizations to `query-url` tool ([#1090](https://github.com/googleapis/genai-toolbox/issues/1090)) ([5bf2758](https://github.com/googleapis/genai-toolbox/commit/5bf275846a268a8d305d6392fa4e8e79e365f00d))
* **tools/looker:** New Looker tools for dashboards ([#1118](https://github.com/googleapis/genai-toolbox/issues/1118)) ([42be3f5](https://github.com/googleapis/genai-toolbox/commit/42be3f550ceab34baf43fe2a246ded7a09cff8e3))
* **ui:** Add login with google button for automatic id token retrieval ([#1044](https://github.com/googleapis/genai-toolbox/issues/1044)) ([d91bdfc](https://github.com/googleapis/genai-toolbox/commit/d91bdfcbdcbf5fcae6e17770c88c5ffba4115d67))
### Bug Fixes
* Correct the capitalization of `map` manifests ([#1139](https://github.com/googleapis/genai-toolbox/issues/1139)) ([0b0457c](https://github.com/googleapis/genai-toolbox/commit/0b0457c8e6b78f53a2f1929c05d46fb31421fbca))
* Remove unnecessary fields from `map` parameter manifests ([#1138](https://github.com/googleapis/genai-toolbox/issues/1138)) ([fbe8c1a](https://github.com/googleapis/genai-toolbox/commit/fbe8c1a9c0f28797443bf9cb32d63bfbc1072881))
* **tools/looker:** Add authorized invocation feature to all Looker tools ([#1091](https://github.com/googleapis/genai-toolbox/issues/1091)) ([3b1cce7](https://github.com/googleapis/genai-toolbox/commit/3b1cce72e7ff4f6b3a0a31db0564dc45b8302caa))
* Update ui info log to reflect port ([#1125](https://github.com/googleapis/genai-toolbox/issues/1125)) ([6d691d5](https://github.com/googleapis/genai-toolbox/commit/6d691d582f18137de504d39f372c5104b7392bff))
## [0.11.0](https://github.com/googleapis/genai-toolbox/compare/v0.11.0...v0.11.0) (2025-08-05)
## [0.11.0](https://github.com/googleapis/genai-toolbox/compare/v0.10.0...v0.11.0) (2025-08-04)
### ⚠ BREAKING CHANGES
@@ -66,16 +10,12 @@
### Features
* Add DuckDB source and tool ([#879](https://github.com/googleapis/genai-toolbox/pull/879)) ([fd14933](https://github.com/googleapis/genai-toolbox/commit/fd149337e9fa8e912e8699962a7104d51cdffc5d))
* Add TiDB source and tool ([#829](https://github.com/googleapis/genai-toolbox/issues/829)) ([6eaf36a](https://github.com/googleapis/genai-toolbox/commit/6eaf36ac8505d523fa4f5a4ac3c97209fd688cef))
* Interactive web UI for Toolbox ([#1065](https://github.com/googleapis/genai-toolbox/issues/1065)) ([8749b03](https://github.com/googleapis/genai-toolbox/commit/8749b030035e65361047c4ead13dfacb8e9a9b59))
* **prebuiltconfigs/cloud-sql-postgres:** Introduce additional parameter to limit context in list tables ([#1062](https://github.com/googleapis/genai-toolbox/issues/1062)) ([c3a58e1](https://github.com/googleapis/genai-toolbox/commit/c3a58e1d1678dc14d8de5006511df597fd75faa3))
* **tools/looker-query-url:** Add support for `looker-query-url` tool ([#1015](https://github.com/googleapis/genai-toolbox/issues/1015)) ([327ddf0](https://github.com/googleapis/genai-toolbox/commit/327ddf0439058aa5ecd2c7ae8251fcde6aeff18c))
* **tools/dataplex-lookup-entry:** Add support for `dataplex-lookup-entry` tool ([#1009](https://github.com/googleapis/genai-toolbox/issues/1009)) ([5fa1660](https://github.com/googleapis/genai-toolbox/commit/5fa1660fc8631989b4d13abea205b6426bb506a5))
### Bug Fixes
* **tools/bigquery,mssql,mysql,postgres,spanner,tidb:** Add query logging to execute-sql tools ([#1069](https://github.com/googleapis/genai-toolbox/issues/1069)) ([0527532](https://github.com/googleapis/genai-toolbox/commit/0527532bd7085ef9eb8f9c30f430a2f2f35cef32))
## [0.10.0](https://github.com/googleapis/genai-toolbox/compare/v0.9.0...v0.10.0) (2025-07-25)

View File

@@ -116,9 +116,7 @@ tools.
* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the
`/tests/newdb/newdb_test.go` that are **required** to be run as long as your
code contains related features. Please check each test suite for the config
defaults; if your source requires test suite config updates, please refer to
[config option](./tests/option.go):
code contains related features:
1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
tool's manifest.
@@ -137,7 +135,7 @@ tools.
parameters][temp-param-doc]. Only run this test if template
parameters apply to your tool.
* **Add the new database to the integration test workflow** in
* **Add the new database to the test config** in
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
[tool-get]:
@@ -179,43 +177,6 @@ and data.
#### 6. Submit a Pull Request
Submit a pull request to the repository with your changes. Be sure to include a
detailed description of your changes and any requests for long term testing
resources.
* **Title:** All pull request titles should follow the formatting of
[Conventional
Commit](https://www.conventionalcommits.org/) guidelines: `<type>[optional
scope]: description`. For example, if you are adding a new field in postgres
source, the title should be `feat(source/postgres): add support for
"new-field" field in postgres source`.
Here are some commonly used `type` in this GitHub repo.
| **type** | **description** |
|-----------------|-------------------------------------------------------------------------------------------------------|
| Breaking Change | Anything with a `!` after the type/scope introduces a breaking change. |
| feat | Adding a new feature to the codebase. |
| fix | Fixing a bug or typo in the codebase. This does not include fixing docs. |
| test | Changes made to test files. |
| ci | Changes made to the cicd configuration files or scripts. |
| docs | Documentation-related PRs, including fixes on docs. |
| chore | Other small tasks or updates that don't fall into any of the above types. |
| refactor | Change src code but unlike feat, no tests break and no line loses coverage. |
| revert | Revert changes made in another commit. |
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |
Pull requests should always add scope whenever possible. The scope is
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
`tools/mssql-sql`).
Ideally, **each PR covers only one scope**; if this is
unavoidable, multiple scopes can be separated with a comma (e.g.
`sources/postgres,sources/alloydbpg`). If the PR covers multiple `scope-type`
(such as adding a new database), you can disregard the `scope-type`, e.g.
`feat(new-db): adding support for new-db source and tool`.
* **PR Description:** PR description should **always** be included. It should
include a concise description of the changes, its impact, along with a
summary of the solution. If the PR is related to a specific issue, the issue
number should be mentioned in the PR description (e.g. `Fixes #1`).
* **Submit a pull request** to the repository with your changes. Be sure to
include a detailed description of your changes and any requests for long term
testing resources.

View File

@@ -108,7 +108,7 @@ variables for each source.
* AlloyDB - setup in the test project
* AI Natural Language ([setup
instructions](https://cloud.google.com/alloydb/docs/ai/use-natural-language-generate-sql-queries))
has been configured for `alloydb-ai-nl` tool tests
has been configured for `alloydb-ai-nl` tool tests
* The Cloud Build service account is a user
* Bigtable - setup in the test project
* The Cloud Build service account is a user

View File

@@ -13,7 +13,7 @@
# limitations under the License.
# Use the latest stable golang 1.x to compile to a binary
FROM --platform=$BUILDPLATFORM golang:1 AS build
FROM --platform=$BUILDPLATFORM golang:1-bookworm AS build
WORKDIR /go/src/genai-toolbox
COPY . .
@@ -23,8 +23,11 @@ ARG TARGETARCH
ARG BUILD_TYPE="container.dev"
ARG COMMIT_SHA=""
RUN apt-get update && \
apt install -y clang && \
rm -rf /var/lib/apt/lists/*
RUN go get ./...
RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
RUN CGO_ENABLED=1 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=container.${BUILD_TYPE} -X github.com/googleapis/genai-toolbox/cmd.commitSha=${COMMIT_SHA}"
# Final Stage
@@ -34,4 +37,4 @@ WORKDIR /app
COPY --from=build --chown=nonroot /go/src/genai-toolbox/genai-toolbox /toolbox
USER nonroot
ENTRYPOINT ["/toolbox"]
ENTRYPOINT ["/toolbox"]
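
The Dockerfile change pairs `apt install clang` with `CGO_ENABLED=1` because, per the commit message ("ci: enable C++ compiler for duckDB"), the DuckDB driver for Go is a cgo binding around the DuckDB C++ library and cannot compile without a C/C++ toolchain. A minimal sketch assuming the commonly used `github.com/marcboeker/go-duckdb` driver (the repository may wire in a different one):

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	// cgo-based driver: compiling this import is what requires clang
	// (or another C/C++ compiler) and CGO_ENABLED=1.
	_ "github.com/marcboeker/go-duckdb"
)

func main() {
	// An empty DSN opens an in-memory DuckDB database.
	db, err := sql.Open("duckdb", "")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var msg string
	if err := db.QueryRow("SELECT 'duckdb needs cgo'").Scan(&msg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(msg)
}
```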

View File

@@ -33,15 +33,12 @@ documentation](https://googleapis.github.io/genai-toolbox/).
- [Getting Started](#getting-started)
- [Installing the server](#installing-the-server)
- [Running the server](#running-the-server)
- [Homebrew Users](#homebrew-users)
- [Integrating your application](#integrating-your-application)
- [Configuration](#configuration)
- [Sources](#sources)
- [Tools](#tools)
- [Toolsets](#toolsets)
- [Versioning](#versioning)
- [Pre-1.0.0 Versioning](#pre-100-versioning)
- [Post-1.0.0 Versioning](#post-100-versioning)
- [Contributing](#contributing)
- [Community](#community)
@@ -117,7 +114,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.13.0
export VERSION=0.10.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -130,7 +127,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.13.0
export VERSION=0.10.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -154,7 +151,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.13.0
go install github.com/googleapis/genai-toolbox@v0.10.0
```
<!-- {x-release-please-end} -->
@@ -727,26 +724,12 @@ my_second_toolset = client.load_toolset("my_second_toolset")
## Versioning
This project uses [semantic versioning](https://semver.org/) (`MAJOR.MINOR.PATCH`).
Since the project is in a pre-release stage (version `0.x.y`), we follow the
standard conventions for initial development:
This project uses [semantic versioning](https://semver.org/), including a
`MAJOR.MINOR.PATCH` version number that increments with:
### Pre-1.0.0 Versioning
While the major version is `0`, the public API should be considered unstable.
The version will be incremented as follows:
- **`0.MINOR.PATCH`**: The **MINOR** version is incremented when we add
new functionality or make breaking, incompatible API changes.
- **`0.MINOR.PATCH`**: The **PATCH** version is incremented for
backward-compatible bug fixes.
### Post-1.0.0 Versioning
Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:
- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
- MAJOR version when we make incompatible API changes
- MINOR version when we add functionality in a backward compatible manner
- PATCH version when we make backward compatible bug fixes
The public API that this applies to is the CLI associated with Toolbox, the
interactions with official SDKs, and the definitions in the `tools.yaml` file.

View File

@@ -43,35 +43,25 @@ import (
// Import tool packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/duckdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
@@ -79,8 +69,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
@@ -101,8 +89,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
@@ -111,8 +97,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
@@ -122,14 +106,13 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
_ "github.com/googleapis/genai-toolbox/internal/sources/duckdb"
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
@@ -137,13 +120,11 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
)
@@ -242,13 +223,7 @@ func NewCommand(opts ...Option) *Command {
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
// Fetch prebuilt tools sources to customize the help description
prebuiltHelp := fmt.Sprintf(
"Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: '%s'.",
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
)
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres-admin', 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'dataplex', 'firestore', 'looker', 'mssql', 'mysql', 'postgres', 'spanner', 'spanner-postgres'.")
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
@@ -268,40 +243,32 @@ type ToolsFile struct {
}
// parseEnv replaces environment variables ${ENV_NAME} with their values.
// also support ${ENV_NAME:default_value}.
func parseEnv(input string) (string, error) {
re := regexp.MustCompile(`\$\{(\w+)(:(\w*))?\}`)
func parseEnv(input string) string {
re := regexp.MustCompile(`\$\{(\w+)\}`)
var err error
output := re.ReplaceAllStringFunc(input, func(match string) string {
return re.ReplaceAllStringFunc(input, func(match string) string {
parts := re.FindStringSubmatch(match)
if len(parts) < 2 {
// technically shouldn't happen
return match
}
// extract the variable name
variableName := parts[1]
if value, found := os.LookupEnv(variableName); found {
return value
}
if parts[2] != "" {
return parts[3]
}
err = fmt.Errorf("environment variable not found: %q", variableName)
return ""
return match
})
return output, err
}
// parseToolsFile parses the provided yaml into appropriate configs.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
var toolsFile ToolsFile
// Replace environment variables if found
output, err := parseEnv(string(raw))
if err != nil {
return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
}
raw = []byte(output)
raw = []byte(parseEnv(string(raw)))
// Parse contents
err = yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
err := yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
if err != nil {
return toolsFile, err
}
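
The two `parseEnv` variants in this hunk differ in whether the `${VAR:default}` syntax is honored and whether an unresolved variable is an error. A standalone sketch of the default-supporting behavior (`substEnv` is illustrative, not the repository's function):

```go
package main

import (
	"fmt"
	"os"
	"regexp"
)

// substEnv follows the default-supporting variant: ${NAME} must resolve,
// while ${NAME:fallback} falls back (possibly to "") when NAME is unset.
func substEnv(input string) (string, error) {
	re := regexp.MustCompile(`\$\{(\w+)(:(\w*))?\}`)
	var err error
	out := re.ReplaceAllStringFunc(input, func(match string) string {
		parts := re.FindStringSubmatch(match)
		name := parts[1]
		if v, ok := os.LookupEnv(name); ok {
			return v
		}
		if parts[2] != "" { // a ":" was present, so use the default
			return parts[3]
		}
		err = fmt.Errorf("environment variable not found: %q", name)
		return ""
	})
	return out, err
}

func main() {
	os.Setenv("HOST", "db.internal")
	s, err := substEnv("host: ${HOST}, port: ${PORT:5432}")
	fmt.Println(s, err) // host: db.internal, port: 5432 <nil>
}
```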
@@ -837,7 +804,7 @@ func run(cmd *Command) error {
}
cmd.logger.InfoContext(ctx, "Server ready to serve!")
if cmd.cfg.UI {
cmd.logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", cmd.cfg.Address, cmd.cfg.Port))
cmd.logger.InfoContext(ctx, "Toolbox UI is up and running at: http://localhost:5000/ui")
}
go func() {

View File

@@ -206,72 +206,6 @@ func TestServerConfigFlags(t *testing.T) {
}
}
func TestParseEnv(t *testing.T) {
tcs := []struct {
desc string
env map[string]string
in string
want string
err bool
errString string
}{
{
desc: "without default without env",
in: "${FOO}",
want: "",
err: true,
errString: `environment variable not found: "FOO"`,
},
{
desc: "without default with env",
env: map[string]string{
"FOO": "bar",
},
in: "${FOO}",
want: "bar",
},
{
desc: "with empty default",
in: "${FOO:}",
want: "",
},
{
desc: "with default",
in: "${FOO:bar}",
want: "bar",
},
{
desc: "with default with env",
env: map[string]string{
"FOO": "hello",
},
in: "${FOO:bar}",
want: "hello",
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
if tc.env != nil {
for k, v := range tc.env {
t.Setenv(k, v)
}
}
got, err := parseEnv(tc.in)
if tc.err {
if err == nil {
t.Fatalf("expected error not found")
}
if tc.errString != err.Error() {
t.Fatalf("incorrect error string: got %s, want %s", err, tc.errString)
}
}
if tc.want != got {
t.Fatalf("unexpected want: got %s, want %s", got, tc.want)
}
})
}
}
func TestToolFileFlag(t *testing.T) {
tcs := []struct {
desc string
@@ -886,14 +820,13 @@ func TestParseToolFileWithAuth(t *testing.T) {
func TestEnvVarReplacement(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
t.Setenv("TestHeader", "ACTUAL_HEADER")
t.Setenv("API_KEY", "ACTUAL_API_KEY")
t.Setenv("clientId", "ACTUAL_CLIENT_ID")
t.Setenv("clientId2", "ACTUAL_CLIENT_ID_2")
t.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME")
t.Setenv("cat_string", "cat")
t.Setenv("food_string", "food")
t.Setenv("TestHeader", "ACTUAL_HEADER")
os.Setenv("TestHeader", "ACTUAL_HEADER")
os.Setenv("API_KEY", "ACTUAL_API_KEY")
os.Setenv("clientId", "ACTUAL_CLIENT_ID")
os.Setenv("clientId2", "ACTUAL_CLIENT_ID_2")
os.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME")
os.Setenv("cat_string", "cat")
os.Setenv("food_string", "food")
if err != nil {
t.Fatalf("unexpected error: %s", err)
@@ -1228,11 +1161,9 @@ func TestSingleEdit(t *testing.T) {
}
func TestPrebuiltTools(t *testing.T) {
// Get prebuilt configs
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
clickhouse_config, _ := prebuiltconfigs.Get("clickhouse")
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
@@ -1244,80 +1175,6 @@ func TestPrebuiltTools(t *testing.T) {
postgresconfig, _ := prebuiltconfigs.Get("postgres")
spanner_config, _ := prebuiltconfigs.Get("spanner")
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
// Set environment variables
t.Setenv("API_KEY", "your_api_key")
t.Setenv("BIGQUERY_PROJECT", "your_gcp_project_id")
t.Setenv("DATAPLEX_PROJECT", "your_gcp_project_id")
t.Setenv("FIRESTORE_PROJECT", "your_gcp_project_id")
t.Setenv("FIRESTORE_DATABASE", "your_firestore_db_name")
t.Setenv("SPANNER_PROJECT", "your_gcp_project_id")
t.Setenv("SPANNER_INSTANCE", "your_spanner_instance")
t.Setenv("SPANNER_DATABASE", "your_spanner_db")
t.Setenv("ALLOYDB_POSTGRES_PROJECT", "your_gcp_project_id")
t.Setenv("ALLOYDB_POSTGRES_REGION", "your_gcp_region")
t.Setenv("ALLOYDB_POSTGRES_CLUSTER", "your_alloydb_cluster")
t.Setenv("ALLOYDB_POSTGRES_INSTANCE", "your_alloydb_instance")
t.Setenv("ALLOYDB_POSTGRES_DATABASE", "your_alloydb_db")
t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user")
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password")
t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol")
t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database")
t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password")
t.Setenv("CLICKHOUSE_USER", "your_clickhouse_user")
t.Setenv("CLICKHOUSE_HOST", "your_clickhosue_host")
t.Setenv("CLICKHOUSE_PORT", "8123")
t.Setenv("CLOUD_SQL_POSTGRES_PROJECT", "your_pg_project")
t.Setenv("CLOUD_SQL_POSTGRES_INSTANCE", "your_pg_instance")
t.Setenv("CLOUD_SQL_POSTGRES_DATABASE", "your_pg_db")
t.Setenv("CLOUD_SQL_POSTGRES_REGION", "your_pg_region")
t.Setenv("CLOUD_SQL_POSTGRES_USER", "your_pg_user")
t.Setenv("CLOUD_SQL_POSTGRES_PASS", "your_pg_pass")
t.Setenv("CLOUD_SQL_MYSQL_PROJECT", "your_gcp_project_id")
t.Setenv("CLOUD_SQL_MYSQL_REGION", "your_gcp_region")
t.Setenv("CLOUD_SQL_MYSQL_INSTANCE", "your_instance")
t.Setenv("CLOUD_SQL_MYSQL_DATABASE", "your_cloudsql_mysql_db")
t.Setenv("CLOUD_SQL_MYSQL_USER", "your_cloudsql_mysql_user")
t.Setenv("CLOUD_SQL_MYSQL_PASSWORD", "your_cloudsql_mysql_password")
t.Setenv("CLOUD_SQL_MSSQL_PROJECT", "your_gcp_project_id")
t.Setenv("CLOUD_SQL_MSSQL_REGION", "your_gcp_region")
t.Setenv("CLOUD_SQL_MSSQL_INSTANCE", "your_cloudsql_mssql_instance")
t.Setenv("CLOUD_SQL_MSSQL_DATABASE", "your_cloudsql_mssql_db")
t.Setenv("CLOUD_SQL_MSSQL_IP_ADDRESS", "127.0.0.1")
t.Setenv("CLOUD_SQL_MSSQL_USER", "your_cloudsql_mssql_user")
t.Setenv("CLOUD_SQL_MSSQL_PASSWORD", "your_cloudsql_mssql_password")
t.Setenv("CLOUD_SQL_POSTGRES_PASSWORD", "your_cloudsql_pg_password")
t.Setenv("POSTGRES_HOST", "localhost")
t.Setenv("POSTGRES_PORT", "5432")
t.Setenv("POSTGRES_DATABASE", "your_postgres_db")
t.Setenv("POSTGRES_USER", "your_postgres_user")
t.Setenv("POSTGRES_PASSWORD", "your_postgres_password")
t.Setenv("MYSQL_HOST", "localhost")
t.Setenv("MYSQL_PORT", "3306")
t.Setenv("MYSQL_DATABASE", "your_mysql_db")
t.Setenv("MYSQL_USER", "your_mysql_user")
t.Setenv("MYSQL_PASSWORD", "your_mysql_password")
t.Setenv("MSSQL_HOST", "localhost")
t.Setenv("MSSQL_PORT", "1433")
t.Setenv("MSSQL_DATABASE", "your_mssql_db")
t.Setenv("MSSQL_USER", "your_mssql_user")
t.Setenv("MSSQL_PASSWORD", "your_mssql_password")
t.Setenv("LOOKER_BASE_URL", "https://your_company.looker.com")
t.Setenv("LOOKER_CLIENT_ID", "your_looker_client_id")
t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
t.Setenv("LOOKER_VERIFY_SSL", "true")
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
@@ -1333,7 +1190,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"alloydb-postgres-admin-tools": tools.ToolsetConfig{
Name: "alloydb-postgres-admin-tools",
ToolNames: []string{"alloydb-create-cluster", "alloydb-operations-get", "alloydb-create-instance", "alloydb-list-clusters", "alloydb-list-instances", "alloydb-list-users", "alloydb-create-user"},
ToolNames: []string{"alloydb-create-cluster", "alloydb-operations-get", "alloydb-create-instance"},
},
},
},
@@ -1353,17 +1210,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"bigquery-database-tools": tools.ToolsetConfig{
Name: "bigquery-database-tools",
ToolNames: []string{"ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
},
},
},
{
name: "clickhouse prebuilt tools",
in: clickhouse_config,
wantToolset: server.ToolsetConfigs{
"clickhouse-database-tools": tools.ToolsetConfig{
Name: "clickhouse-database-tools",
ToolNames: []string{"execute_sql"},
ToolNames: []string{"execute_sql", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
},
},
},
@@ -1403,7 +1250,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"dataplex-tools": tools.ToolsetConfig{
Name: "dataplex-tools",
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry", "dataplex_search_aspect_types"},
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry"},
},
},
},
@@ -1413,7 +1260,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"firestore-database-tools": tools.ToolsetConfig{
Name: "firestore-database-tools",
ToolNames: []string{"firestore-get-documents", "firestore-add-documents", "firestore-update-document", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
ToolNames: []string{"firestore-get-documents", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
},
},
},
@@ -1443,7 +1290,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"looker-tools": tools.ToolsetConfig{
Name: "looker-tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element"},
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look"},
},
},
},

View File

@@ -1 +1 @@
0.13.0
0.11.0

View File

@@ -7,11 +7,11 @@ description: Frequently asked questions about Toolbox.
## How can I deploy or run Toolbox?
MCP Toolbox for Databases is open-source and can be run or deployed to a
MCP Toolbox for Databases is open-source and can be ran or deployed to a
multitude of environments. For convenience, we release [compiled binaries and
docker images][release-notes] (but you can always compile yourself as well!).
For detailed instructions, check out these resources:
For detailed instructions, check our these resources:
- [Quickstart: How to Run Locally](../getting-started/local_quickstart.md)
- [Deploy to Cloud Run](../how-to/deploy_toolbox.md)

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.13.0\" # x-release-please-version\n",
"version = \"0.11.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -22,11 +22,6 @@ etc., you could use environment variables instead with the format `${ENV_NAME}`.
user: ${USER_NAME}
password: ${PASSWORD}
```
A default value can be specified like `${ENV_NAME:default}`.
```yaml
port: ${DB_PORT:3306}
```
### Sources
@@ -50,7 +45,7 @@ For more details on configuring different types of sources, see the
### Tools
The `tools` section of your `tools.yaml` defines the actions your agent can
The `tools` section of your `tools.yaml` define your the actions your agent can
take: what kind of tool it is, which source(s) it affects, what parameters it
uses, etc.
@@ -81,7 +76,7 @@ toolsets:
my_first_toolset:
- my_first_tool
- my_second_tool
my_second_toolset:
my_second_toolset:
- my_second_tool
- my_third_tool
```

View File

@@ -86,7 +86,7 @@ To install Toolbox as a binary:
```sh
# see releases page for other versions
export VERSION=0.13.0
export VERSION=0.10.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.13.0
export VERSION=0.10.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -115,7 +115,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.13.0
go install github.com/googleapis/genai-toolbox@v0.10.0
```
{{% /tab %}}
@@ -139,7 +139,7 @@ Toolbox enables dynamic reloading by default. To disable, use the
#### Launching Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/use-toolbox-ui/index.md).
```sh
./toolbox --ui

View File

@@ -18,13 +18,265 @@ This guide assumes you have already done the following:
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
`vertexai=True` or a Google GenAI model), follow these one-time setup steps for
local development:
1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
1. Set your project and enable Vertex AI
```bash
gcloud config set project YOUR_PROJECT_ID
gcloud services enable aiplatform.googleapis.com
```
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
## Step 1: Set up your database
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create a table using the following command:
```sql
CREATE TABLE hotels(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
```
1. Insert data into the table.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
```
1. End the database session:
```bash
\q
```
## Step 2: Install and configure Toolbox
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. Be sure to update any fields
such as `user`, `password`, or `database` that you may have customized in the
previous step.
{{< notice tip >}}
In practice, use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
search-hotels-by-location:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
book-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL; raises an error if not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
update-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message
indicating whether the hotel was successfully updated or not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to update.
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
statement: >-
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
as date) WHERE id = $1;
cancel-hotel:
kind: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
For more info on tools, check out the `Resources` section of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}
## Step 3: Connect your agent to Toolbox
@@ -94,23 +346,305 @@ pip install google-genai
code to create an agent:
{{< tabpane persist=header >}}
{{< tab header="ADK" lang="python" >}}
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.genai import types
from toolbox_core import ToolboxSyncClient
{{< include "quickstart/python/adk/quickstart.py" >}}
import asyncio
import os
# TODO(developer): replace this with your Google API key
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
async def main():
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant.',
instruction=prompt,
tools=toolbox_client.load_toolset("my-toolset"),
)
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
session = await session_service.create_session(
state={}, app_name='hotel_agent', user_id='123'
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
queries = [
"Find hotels in Basel with Basel in it's name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
for query in queries:
content = types.Content(role='user', parts=[types.Part(text=query)])
events = runner.run(session_id=session.id,
user_id='123', new_message=content)
responses = (
part.text
for event in events
for part in event.content.parts
if part.text is not None
)
for text in responses:
print(text)
asyncio.run(main())
{{< /tab >}}
{{< tab header="LangChain" lang="python" >}}
import asyncio
{{< include "quickstart/python/langchain/quickstart.py" >}}
from langgraph.prebuilt import create_react_agent
# TODO(developer): replace this with another import if needed
from langchain_google_vertexai import ChatVertexAI
# from langchain_google_genai import ChatGoogleGenerativeAI
# from langchain_anthropic import ChatAnthropic
from langgraph.checkpoint.memory import MemorySaver
from toolbox_langchain import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in it's name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def run_application():
# TODO(developer): replace this with another model if needed
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
# Load the tools from the Toolbox server
async with ToolboxClient("http://127.0.0.1:5000") as client:
tools = await client.aload_toolset()
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
config = {"configurable": {"thread_id": "thread-1"}}
for query in queries:
inputs = {"messages": [("user", prompt + query)]}
response = agent.invoke(inputs, stream_mode="values", config=config)
print(response["messages"][-1].content)
asyncio.run(run_application())
{{< /tab >}}
{{< tab header="LlamaIndex" lang="python" >}}
import asyncio
import os
{{< include "quickstart/python/llamaindex/quickstart.py" >}}
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.workflow import Context
# TODO(developer): replace this with another import if needed
from llama_index.llms.google_genai import GoogleGenAI
# from llama_index.llms.anthropic import Anthropic
from toolbox_llamaindex import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in it's name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def run_application():
# TODO(developer): replace this with another model if needed
llm = GoogleGenAI(
model="gemini-2.0-flash-001",
vertexai_config={"project": "project-id", "location": "us-central1"},
)
# llm = GoogleGenAI(
# api_key=os.getenv("GOOGLE_API_KEY"),
# model="gemini-2.0-flash-001",
# )
# llm = Anthropic(
# model="claude-3-7-sonnet-latest",
# api_key=os.getenv("ANTHROPIC_API_KEY")
# )
# Load the tools from the Toolbox server
async with ToolboxClient("http://127.0.0.1:5000") as client:
tools = await client.aload_toolset()
agent = AgentWorkflow.from_tools_or_functions(
tools,
llm=llm,
system_prompt=prompt,
)
ctx = Context(agent)
for query in queries:
response = await agent.run(user_msg=query, ctx=ctx)
print(f"---- {query} ----")
print(str(response))
asyncio.run(run_application())
{{< /tab >}}
{{< tab header="Core" lang="python" >}}
import asyncio
{{< include "quickstart/python/core/quickstart.py" >}}
from google import genai
from google.genai.types import (
Content,
FunctionDeclaration,
GenerateContentConfig,
Part,
Tool,
)
from toolbox_core import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in it's name.",
"Please book the hotel Hilton Basel for me.",
"This is too expensive. Please cancel it.",
"Please book Hyatt Regency for me",
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
]
async def run_application():
async with ToolboxClient("<http://127.0.0.1:5000>") as toolbox_client:
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
# integration. While this example uses Google's genai client, these callables can be adapted for
# various function-calling or agent frameworks. For easier integration with supported frameworks
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
# provided wrapper packages, which handle framework-specific boilerplate.
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
genai_client = genai.Client(
vertexai=True, project="project-id", location="us-central1"
)
genai_tools = [
Tool(
function_declarations=[
FunctionDeclaration.from_callable_with_api_option(callable=tool)
]
)
for tool in toolbox_tools
]
history = []
for query in queries:
user_prompt_content = Content(
role="user",
parts=[Part.from_text(text=query)],
)
history.append(user_prompt_content)
response = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
system_instruction=prompt,
tools=genai_tools,
),
)
history.append(response.candidates[0].content)
function_response_parts = []
for function_call in response.function_calls:
fn_name = function_call.name
# The tools are sorted alphabetically
if fn_name == "search-hotels-by-name":
function_result = await toolbox_tools[3](**function_call.args)
elif fn_name == "search-hotels-by-location":
function_result = await toolbox_tools[2](**function_call.args)
elif fn_name == "book-hotel":
function_result = await toolbox_tools[0](**function_call.args)
elif fn_name == "update-hotel":
function_result = await toolbox_tools[4](**function_call.args)
elif fn_name == "cancel-hotel":
function_result = await toolbox_tools[1](**function_call.args)
else:
raise ValueError("Function name not present.")
function_response = {"result": function_result}
function_response_part = Part.from_function_response(
name=function_call.name,
response=function_response,
)
function_response_parts.append(function_response_part)
if function_response_parts:
tool_response_content = Content(role="tool", parts=function_response_parts)
history.append(tool_response_content)
response2 = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
tools=genai_tools,
),
)
final_model_response_content = response2.candidates[0].content
history.append(final_model_response_content)
print(response2.text)
asyncio.run(run_application())
{{< /tab >}}
{{< /tabpane >}}

View File

@@ -13,17 +13,266 @@ This guide assumes you have already done the following:
1. Installed [Go (v1.24.2 or higher)].
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
### Cloud Setup (Optional)
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:
1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
1. Set your project and enable Vertex AI
```bash
gcloud config set project YOUR_PROJECT_ID
gcloud services enable aiplatform.googleapis.com
```
[Go (v1.24.2 or higher)]: https://go.dev/doc/install
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
## Step 1: Set up your database
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create a table using the following command:
```sql
CREATE TABLE hotels(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
```
1. Insert data into the table.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
```
1. End the database session:
```bash
\q
```
## Step 2: Install and configure Toolbox
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. Be sure to update any fields
such as `user`, `password`, or `database` that you may have customized in the
previous step.
{{< notice tip >}}
In practice, use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
search-hotels-by-location:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
book-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL; raises an error if not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
update-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message
indicating whether the hotel was successfully updated or not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to update.
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
statement: >-
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
as date) WHERE id = $1;
cancel-hotel:
kind: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
For more info on tools, check out the `Resources` section of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}
## Step 3: Connect your agent to Toolbox
@@ -522,7 +771,6 @@ import (
"context"
"encoding/json"
"log"
"fmt
"github.com/googleapis/mcp-toolbox-sdk-go/core"
openai "github.com/openai/openai-go"
@@ -624,7 +872,7 @@ func main() {
log.Println("No function call")
}
// If there was a function call, continue the conversation
// If there is a was a function call, continue the conversation
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
for _, toolCall := range toolCalls {
@@ -652,7 +900,7 @@ func main() {
params.Messages = append(params.Messages, openai.AssistantMessage(query))
fmt.Println("\n", completion.Choices[0].Message.Content)
println("\n", completion.Choices[0].Message.Content)
}
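The fragments above show only slices of the full loop. As a compact, self-contained sketch of the conversation pattern they implement — generate, check for tool calls, execute the calls, append the results, repeat — using hypothetical stand-in types rather than the actual openai-go or Toolbox Go SDK APIs:

```go
package main

import "fmt"

// toolCall and reply are hypothetical stand-ins for the model's response
// types; the real quickstart uses the openai-go client instead.
type toolCall struct {
	Name string
	Args map[string]any
}

type reply struct {
	Text      string
	ToolCalls []toolCall
}

// generate stubs the model: it requests one tool call on the first turn,
// then returns a final text answer.
func generate(history []string) reply {
	if len(history) == 1 {
		return reply{ToolCalls: []toolCall{{Name: "search-hotels-by-name", Args: map[string]any{"name": "Basel"}}}}
	}
	return reply{Text: "Found the Hilton Basel."}
}

// invokeTool stubs tool execution; the real code invokes a Toolbox tool.
func invokeTool(name string, args map[string]any) string {
	return fmt.Sprintf("result of %s(%v)", name, args)
}

func main() {
	history := []string{"Find hotels in Basel."}
	for {
		resp := generate(history)
		if len(resp.ToolCalls) == 0 {
			fmt.Println(resp.Text) // no more tool calls: print the final answer
			break
		}
		// If there was a function call, execute it and continue the conversation.
		for _, call := range resp.ToolCalls {
			history = append(history, invokeTool(call.Name, call.Args))
		}
	}
}
```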

View File

@@ -3,7 +3,7 @@ title: "JS Quickstart (Local)"
type: docs
weight: 3
description: >
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-js), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/), [GenkitJS](https://genkit.dev/docs/get-started/), [LlamaIndex](https://ts.llamaindex.ai/) and [GoogleGenAI](https://github.com/googleapis/js-genai).
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-js), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/), [GenkitJS](https://genkit.dev/docs/get-started/), and [LlamaIndex](https://ts.llamaindex.ai/).
---
## Before you begin
@@ -13,17 +13,265 @@ This guide assumes you have already done the following:
1. Installed [Node.js (v18 or higher)].
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
### Cloud Setup (Optional)
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:
1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
1. Set your project and enable Vertex AI
```bash
gcloud config set project YOUR_PROJECT_ID
gcloud services enable aiplatform.googleapis.com
```
[Node.js (v18 or higher)]: https://nodejs.org/
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
## Step 1: Set up your database
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create a table using the following command:
```sql
CREATE TABLE hotels(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
```
1. Insert data into the table.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
```
1. End the database session:
```bash
\q
```
## Step 2: Install and configure Toolbox
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. Be sure to update any fields
such as `user`, `password`, or `database` that you may have customized in the
previous step.
{{< notice tip >}}
In practice, use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
search-hotels-by-location:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
book-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL; raises an error if not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
update-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message
indicating whether the hotel was successfully updated or not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to update.
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
statement: >-
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
as date) WHERE id = $1;
cancel-hotel:
kind: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
For more info on tools, check out the `Resources` section of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
## Step 3: Connect your agent to Toolbox
@@ -46,17 +294,14 @@ from Toolbox.
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="bash" >}}
npm install langchain @langchain/google-genai
npm install langchain @langchain/google-vertexai
{{< /tab >}}
{{< tab header="GenkitJS" lang="bash" >}}
npm install genkit @genkit-ai/googleai
npm install genkit @genkit-ai/vertexai
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
npm install llamaindex @llamaindex/google @llamaindex/workflow
{{< /tab >}}
{{< tab header="GoogleGenAI" lang="bash" >}}
npm install @google/genai
{{< /tab >}}
{{< /tabpane >}}
1. Create a new file named `hotelAgent.js` and copy the following code to create an agent:
@@ -64,7 +309,7 @@ npm install @google/genai
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="js" >}}
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { ChatVertexAI } from "@langchain/google-vertexai";
import { ToolboxClient } from "@toolbox-sdk/core";
import { tool } from "@langchain/core/tools";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
@@ -90,8 +335,8 @@ const queries = [
"My check in dates would be from April 10, 2024 to April 19, 2024.",
];
async function main() {
const model = new ChatGoogleGenerativeAI({
async function runApplication() {
const model = new ChatVertexAI({
model: "gemini-2.0-flash",
});
@@ -137,7 +382,9 @@ async function main() {
}
}
main();
runApplication()
.catch(console.error)
.finally(() => console.log("\nApplication finished."));
{{< /tab >}}
@@ -167,7 +414,7 @@ const queries = [
"My check in dates would be from April 10, 2024 to April 19, 2024.",
];
async function main() {
async function run() {
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
const ai = genkit({
@@ -231,7 +478,7 @@ async function main() {
}
}
main();
run();
{{< /tab >}}
{{< tab header="LlamaIndex" lang="js" >}}
@@ -318,128 +565,6 @@ main();
{{< /tab >}}
{{< tab header="GoogleGenAI" lang="js" >}}
import { GoogleGenAI } from "@google/genai";
import { ToolboxClient } from "@toolbox-sdk/core";
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
const GOOGLE_API_KEY = 'your-api-key'; // TODO(developer): replace with your API key
const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
location and price tier. Always mention hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;
const queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
];
function mapZodTypeToOpenAPIType(zodTypeName) {
const typeMap = {
'ZodString': 'string',
'ZodNumber': 'number',
'ZodBoolean': 'boolean',
'ZodArray': 'array',
'ZodObject': 'object',
};
return typeMap[zodTypeName] || 'string';
}
async function main() {
const toolboxClient = new ToolboxClient(TOOLBOX_URL);
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
const geminiTools = [{
functionDeclarations: toolboxTools.map(tool => {
const schema = tool.getParamSchema();
const properties = {};
const required = [];
for (const [key, param] of Object.entries(schema.shape)) {
properties[key] = {
type: mapZodTypeToOpenAPIType(param.constructor.name),
description: param.description || '',
};
required.push(key)
}
return {
name: tool.getName(),
description: tool.getDescription(),
parameters: { type: 'object', properties, required },
};
})
}];
const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });
const chat = genAI.chats.create({
model: "gemini-2.5-flash",
config: {
systemInstruction: prompt,
tools: geminiTools,
}
});
for (const query of queries) {
let currentResult = await chat.sendMessage({ message: query });
let finalResponseGiven = false
while (!finalResponseGiven) {
const response = currentResult;
const functionCalls = response.functionCalls || [];
if (functionCalls.length === 0) {
console.log(response.text)
finalResponseGiven = true;
} else {
const toolResponses = [];
for (const call of functionCalls) {
const toolName = call.name
const toolToExecute = toolboxTools.find(t => t.getName() === toolName);
if (toolToExecute) {
try {
const functionResult = await toolToExecute(call.args);
toolResponses.push({
functionResponse: { name: call.name, response: { result: functionResult } }
});
} catch (e) {
console.error(`Error executing tool '${toolName}':`, e);
toolResponses.push({
functionResponse: { name: call.name, response: { error: e.message } }
});
}
}
}
currentResult = await chat.sendMessage({ message: toolResponses });
}
}
}
}
main();
{{< /tab >}}
{{< /tabpane >}}
1. Run your agent, and observe the results:

View File

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -1,70 +0,0 @@
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.genai import types
from toolbox_core import ToolboxSyncClient
import asyncio
import os
# TODO(developer): replace this with your Google API key
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
async def main():
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant.',
instruction=prompt,
tools=toolbox_client.load_toolset("my-toolset"),
)
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
session = await session_service.create_session(
state={}, app_name='hotel_agent', user_id='123'
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
for query in queries:
content = types.Content(role='user', parts=[types.Part(text=query)])
events = runner.run(session_id=session.id,
user_id='123', new_message=content)
responses = (
part.text
for event in events
for part in event.content.parts
if part.text is not None
)
for text in responses:
print(text)
asyncio.run(main())

View File

@@ -1,108 +0,0 @@
import asyncio
from google import genai
from google.genai.types import (
Content,
FunctionDeclaration,
GenerateContentConfig,
Part,
Tool,
)
from toolbox_core import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in its name.",
"Please book the hotel Hilton Basel for me.",
"This is too expensive. Please cancel it.",
"Please book Hyatt Regency for me",
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
]
async def main():
async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
# integration. While this example uses Google's genai client, these callables can be adapted for
# various function-calling or agent frameworks. For easier integration with supported frameworks
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
# provided wrapper packages, which handle framework-specific boilerplate.
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
genai_client = genai.Client(
vertexai=True, project="project-id", location="us-central1"
)
genai_tools = [
Tool(
function_declarations=[
FunctionDeclaration.from_callable_with_api_option(callable=tool)
]
)
for tool in toolbox_tools
]
history = []
for query in queries:
user_prompt_content = Content(
role="user",
parts=[Part.from_text(text=query)],
)
history.append(user_prompt_content)
response = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
system_instruction=prompt,
tools=genai_tools,
),
)
history.append(response.candidates[0].content)
function_response_parts = []
for function_call in response.function_calls:
fn_name = function_call.name
# toolbox_tools is sorted alphabetically by tool name:
# [book-hotel, cancel-hotel, search-hotels-by-location, search-hotels-by-name, update-hotel]
if fn_name == "search-hotels-by-name":
function_result = await toolbox_tools[3](**function_call.args)
elif fn_name == "search-hotels-by-location":
function_result = await toolbox_tools[2](**function_call.args)
elif fn_name == "book-hotel":
function_result = await toolbox_tools[0](**function_call.args)
elif fn_name == "update-hotel":
function_result = await toolbox_tools[4](**function_call.args)
elif fn_name == "cancel-hotel":
function_result = await toolbox_tools[1](**function_call.args)
else:
raise ValueError(f"Unknown function name: {fn_name}")
function_response = {"result": function_result}
function_response_part = Part.from_function_response(
name=function_call.name,
response=function_response,
)
function_response_parts.append(function_response_part)
if function_response_parts:
tool_response_content = Content(role="tool", parts=function_response_parts)
history.append(tool_response_content)
response2 = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
tools=genai_tools,
),
)
final_model_response_content = response2.candidates[0].content
history.append(final_model_response_content)
print(response2.text)
asyncio.run(main())
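The if/elif dispatch above relies on the toolset's alphabetical ordering and breaks silently if tools are added or renamed. A name-based lookup is sturdier; this sketch assumes each loaded callable exposes the tool name via `__name__` (the same name `FunctionDeclaration` derives from the callable and the model echoes back in `function_call.name`).
```python
# Sketch: dispatch tool calls by name instead of list position.
tools_by_name = {tool.__name__: tool for tool in toolbox_tools}

for function_call in response.function_calls:
    tool = tools_by_name.get(function_call.name)
    if tool is None:
        raise ValueError(f"Unknown function name: {function_call.name}")
    function_result = await tool(**function_call.args)
```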

View File

@@ -1,52 +0,0 @@
import asyncio
from langgraph.prebuilt import create_react_agent
# TODO(developer): replace this with another import if needed
from langchain_google_vertexai import ChatVertexAI
# from langchain_google_genai import ChatGoogleGenerativeAI
# from langchain_anthropic import ChatAnthropic
from langgraph.checkpoint.memory import MemorySaver
from toolbox_langchain import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def main():
# TODO(developer): replace this with another model if needed
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
# Load the tools from the Toolbox server
async with ToolboxClient("http://127.0.0.1:5000") as client:
tools = await client.aload_toolset()
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
config = {"configurable": {"thread_id": "thread-1"}}
for query in queries:
inputs = {"messages": [("user", prompt + query)]}
response = agent.invoke(inputs, stream_mode="values", config=config)
print(response["messages"][-1].content)
asyncio.run(main())
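Note that `MemorySaver` keys conversation state by the `thread_id` in `config`, which is what lets the follow-up queries ("cancel it", the check-in dates) refer back to earlier turns. A minimal sketch of starting over with a clean slate, under the same assumptions as the sample:
```python
# Sketch (inside main(), after the loop above): a new thread_id gives the
# agent a fresh checkpoint, so bookings made under "thread-1" are not
# visible in this conversation.
fresh_config = {"configurable": {"thread_id": "thread-2"}}
inputs = {"messages": [("user", prompt + "Find hotels in Zurich.")]}
response = agent.invoke(inputs, stream_mode="values", config=fresh_config)
print(response["messages"][-1].content)
```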

View File

@@ -1,63 +0,0 @@
import asyncio
import os
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.workflow import Context
# TODO(developer): replace this with another import if needed
from llama_index.llms.google_genai import GoogleGenAI
# from llama_index.llms.anthropic import Anthropic
from toolbox_llamaindex import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def main():
# TODO(developer): replace this with another model if needed
llm = GoogleGenAI(
model="gemini-2.0-flash-001",
vertexai_config={"project": "project-id", "location": "us-central1"},
)
# llm = GoogleGenAI(
# api_key=os.getenv("GOOGLE_API_KEY"),
# model="gemini-2.0-flash-001",
# )
# llm = Anthropic(
# model="claude-3-7-sonnet-latest",
# api_key=os.getenv("ANTHROPIC_API_KEY")
# )
# Load the tools from the Toolbox server
async with ToolboxClient("http://127.0.0.1:5000") as client:
tools = await client.aload_toolset()
agent = AgentWorkflow.from_tools_or_functions(
tools,
llm=llm,
system_prompt=prompt,
)
ctx = Context(agent)
for query in queries:
response = await agent.run(user_msg=query, ctx=ctx)
print(f"---- {query} ----")
print(str(response))
asyncio.run(main())

View File

@@ -1,20 +0,0 @@
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
<!-- [START cloud_setup] -->
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
`vertexai=True` or a Google GenAI model), follow these one-time setup steps for
local development:
1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
1. Set your project and enable Vertex AI
```bash
gcloud config set project YOUR_PROJECT_ID
gcloud services enable aiplatform.googleapis.com
```
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
<!-- [END cloud_setup] -->
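To confirm ADC is working before running an agent, a quick check like the following should print your project ID; this assumes the `google-auth` package is installed.
```python
# Minimal ADC sanity check; raises DefaultCredentialsError if ADC is missing.
import google.auth

credentials, project_id = google.auth.default()
print(f"ADC resolved for project: {project_id}")
```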

View File

@@ -1,122 +0,0 @@
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
<!-- [START configure_toolbox] -->
In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. Be sure to update any fields
such as `user`, `password`, or `database` that you may have customized in the
previous step.
{{< notice tip >}}
In practice, use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
search-hotels-by-location:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
book-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Book a hotel by its ID. Returns NULL if the hotel is successfully booked; raises an error otherwise.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
update-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message
indicating whether the hotel was successfully updated or not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to update.
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
statement: >-
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
as date) WHERE id = $1;
cancel-hotel:
kind: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
For more info on tools, check out the `Resources` section of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}
<!-- [END configure_toolbox] -->
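With the server running, you can smoke-test the configuration before wiring up an agent. This is a minimal sketch assuming the `toolbox-core` Python package is installed and the server is on its default port:
```python
# Sketch: verify the server loads the toolset defined in tools.yaml.
import asyncio
from toolbox_core import ToolboxClient

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        tools = await client.load_toolset("my-toolset")
        print(f"Loaded {len(tools)} tools from my-toolset")

asyncio.run(main())
```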

View File

@@ -1,119 +0,0 @@
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
<!-- [START database_setup] -->
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create a table using the following command:
```sql
CREATE TABLE hotels(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
```
1. Insert data into the table.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
```
1. End the database session:
```bash
\q
```
<!-- [END database_setup] -->
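To double-check the seed data outside of `psql`, a short script works as well; this sketch assumes the `psycopg` (version 3) package is installed and reuses the credentials created above.
```python
# Sketch: read back the seeded hotels as toolbox_user.
import psycopg

conninfo = "host=127.0.0.1 dbname=toolbox_db user=toolbox_user password=my-password"
with psycopg.connect(conninfo) as conn:
    for row in conn.execute("SELECT id, name, location FROM hotels ORDER BY id"):
        print(row)
```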

View File

@@ -5,9 +5,329 @@ weight: 2
description: >
Connect your IDE to Firestore using Toolbox.
---
<html>
<head>
<link rel="canonical" href="https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
</head>
</html>
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Firestore. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a Firestore instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up Firestore
1. Create or select a Google Cloud project.
* [Create a new
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects)
* [Select an existing
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects)
1. [Enable the Firestore
API](https://console.cloud.google.com/apis/library/firestore.googleapis.com)
for your project.
1. [Create a Firestore
database](https://cloud.google.com/firestore/docs/create-database-web-mobile-client-library)
if you haven't already.
1. Set up authentication for your local environment.
* [Install gcloud CLI](https://cloud.google.com/sdk/docs/install)
* Run `gcloud auth application-default login` to authenticate
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You must use Toolbox version v0.10.0 or later:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create
a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create
a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Firestore using MCP. Try asking your AI
assistant to list collections, get documents, query collections, or manage
security rules.
The following tools are available to the LLM:
1. **firestore-get-documents**: Gets multiple documents from Firestore by their
paths
1. **firestore-list-collections**: Lists Firestore collections for a given parent
path
1. **firestore-delete-documents**: Deletes multiple documents from Firestore
1. **firestore-query-collection**: Queries documents from a collection with
filtering, ordering, and limit options
1. **firestore-get-rules**: Retrieves the active Firestore security rules for
the current project
1. **firestore-validate-rules**: Validates Firestore security rules and reports
syntax and validation errors
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -11,7 +11,6 @@ an open protocol for connecting Large Language Models (LLMs) to data sources
like Looker. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a Looker instance:
* [Gemini-CLI][gemini-cli]
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
@@ -20,7 +19,6 @@ to expose your developer assistant tools to a Looker instance:
* [Claude code][claudecode]
[toolbox]: https://github.com/googleapis/genai-toolbox
[gemini-cli]: #configure-your-mcp-client
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
@@ -48,19 +46,19 @@ to expose your developer assistant tools to a Looker instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -80,36 +78,6 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolb
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Gemini-CLI" lang="en" %}}
1. Install [Gemini-CLI](https://github.com/google-gemini/gemini-cli#install-globally-with-npm).
1. Create a directory `.gemini` in your home directory if it doesn't exist.
1. Create the file `.gemini/settings.json` if it doesn't exist.
1. Add the following configuration, or add the mcpServers stanza if you already
have a `settings.json` with content. Replace the path to the toolbox
executable and the environment variables with your values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
1. Start Gemini-CLI with the `gemini` command and use the command `/mcp` to see
the configured MCP tools.
{{% /tab %}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
@@ -295,15 +263,11 @@ The following tools are available to the LLM:
1. **get_measures**: list the measures in a given explore
1. **get_filters**: list the filters in a given explore
1. **get_parameters**: list the parameters in a given explore
1. **query**: Run a query and return the data
1. **query**: Run a query
1. **query_sql**: Return the SQL generated by Looker for a query
1. **query_url**: Return a link to the query in Looker for further exploration
1. **get_looks**: Return the saved Looks that match a title or description
1. **run_look**: Run a saved Look and return the data
1. **make_look**: Create a saved Look in Looker and return the URL
1. **get_dashboards**: Return the saved dashboards that match a title or description
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
1. **add_dashboard_element**: Add a tile to a dashboard
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

View File

@@ -1,289 +0,0 @@
---
title: SQL Server using MCP
type: docs
weight: 2
description: "Connect your IDE to SQL Server using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQL Server. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQL Server instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a SQL Server instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You must use Toolbox version v0.10.0 or later:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcp" : {
"servers": {
"cloud-sql-sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to SQL Server using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and their descriptions
1. **execute_sql**: executes any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -1,287 +0,0 @@
---
title: MySQL using MCP
type: docs
weight: 2
description: "Connect your IDE to MySQL using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a MySQL instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a MySQL instance.](https://dev.mysql.com/downloads/installer/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You must use Toolbox version v0.10.0 or later:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and their descriptions
1. **execute_sql**: executes any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -17,8 +17,6 @@ to expose your developer assistant tools to a Postgres instance:
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
@@ -27,8 +25,6 @@ to expose your developer assistant tools to a Postgres instance:
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
{{< notice tip >}}
This guide can be used with [AlloyDB
@@ -56,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -263,57 +259,6 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolb
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools

View File

@@ -75,7 +75,7 @@ networks:
{{< notice tip >}}
You can use this setup to quickly set up Toolbox + Postgres to follow along in our
You can use this setup quickly set up Toolbox + Postgres to follow along in our
[Quickstart](../getting-started/local_quickstart.md)
{{< /notice >}}

View File

@@ -60,8 +60,8 @@ description: >
gcloud iam service-accounts create $SA_NAME
```
1. Grant any IAM roles necessary to the IAM service account. Each source has a
list of necessary IAM permissions listed on its page. The example below is
1. Grant any IAM roles necessary to the IAM service account. Each source have a
list of necessary IAM permissions listed on it's page. The example below is
for cloud sql postgres source:
```bash

View File

@@ -103,14 +103,6 @@ section.
```bash
export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
```
{{< notice note >}}
**The `$PORT` Environment Variable**
Google Cloud Run dictates the port your application must listen on by setting the
`$PORT` environment variable inside your container. This value defaults to
**8080**. Your application's `--port` argument **must** be set to listen on this
port. If there is a mismatch, the container will fail to start and the
deployment will time out.
{{< /notice >}}
1. Deploy Toolbox to Cloud Run using the following command:
@@ -149,7 +141,7 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
1. (Only for local runs) Set up [Application Default
Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
for the principal you set up the `Cloud Run Invoker` role access to.
for the principle you set up the `Cloud Run Invoker` role access to.
1. Run the following to retrieve a non-deterministic URL for the cloud run service:
@@ -159,68 +151,18 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
1. Import and initialize the toolbox client with the URL retrieved above:
{{< tabpane persist=header >}}
{{< tab header="Python" lang="python" >}}
from toolbox_core import ToolboxClient, auth_methods
```python
from toolbox_core import ToolboxClient, auth_methods
# Replace with the Cloud Run service URL generated in the previous step.
URL = "https://cloud-run-url.app"
# Replace with the Cloud Run service URL generated in the previous step.
URL = "https://cloud-run-url.app"
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
async with ToolboxClient(
URL,
client_headers={"Authorization": auth_token_provider},
) as toolbox:
{{< /tab >}}
{{< tab header="Javascript" lang="javascript" >}}
import { ToolboxClient } from '@toolbox-sdk/core';
import {getGoogleIdToken} from '@toolbox-sdk/core/auth'
// Replace with the Cloud Run service URL generated in the previous step.
const URL = 'http://127.0.0.1:5000';
const authTokenProvider = () => getGoogleIdToken(URL);
const client = new ToolboxClient(URL, null, {"Authorization": authTokenProvider});
{{< /tab >}}
{{< tab header="Go" lang="go" >}}
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
func main() {
// Replace with the Cloud Run service URL generated in the previous step.
URL := "http://127.0.0.1:5000"
auth_token_provider, err := core.GetGoogleIDToken(ctx, URL)
if err != nil {
log.Fatalf("Failed to fetch token %v", err)
}
toolboxClient, err := core.NewToolboxClient(
async with ToolboxClient(
URL,
core.WithClientHeaderString("Authorization", auth_token_provider))
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
}
{{< /tab >}}
{{< /tabpane >}}
client_headers={"Authorization": auth_token_provider},
) as toolbox:
```
Now, you can use this client to connect to the deployed Cloud Run instance!
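Putting the pieces together, a complete Python connection script looks roughly like this; it assumes the `toolbox-core` package is installed and that `URL` is the Cloud Run service URL retrieved above.
```python
# Sketch: connect to a Toolbox server on Cloud Run using a Google ID token.
import asyncio
from toolbox_core import ToolboxClient, auth_methods

URL = "https://cloud-run-url.app"  # replace with your Cloud Run service URL

async def main():
    # An async token provider; a sync variant is also available.
    auth_token_provider = auth_methods.aget_google_id_token(URL)
    async with ToolboxClient(
        URL,
        client_headers={"Authorization": auth_token_provider},
    ) as toolbox:
        tools = await toolbox.load_toolset()
        print(f"Connected: loaded {len(tools)} tools")

asyncio.run(main())
```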
## Troubleshooting
{{< notice note >}}
For any deployment or runtime error, the best first step is to check the logs for your service in the Google Cloud Console's Cloud Run section. They often contain the specific error message needed to diagnose the problem.
{{< /notice >}}
* **Deployment Fails with "Container failed to start":** This is almost always
caused by a port mismatch. Ensure your container's `--port` argument is set to
`8080` to match the `$PORT` environment variable provided by Cloud Run.
* **Client Receives Permission Denied Error (401 or 403):** If your client application (e.g., your local SDK) gets a `401 Unauthorized` or `403 Forbidden` error when trying to call your Cloud Run service, it means the client is not properly authenticated as an invoker.
* Ensure the user or service account calling the service has the **Cloud Run Invoker** (`roles/run.invoker`) IAM role.
* If running locally, make sure your Application Default Credentials are set up correctly by running `gcloud auth application-default login`.
* **Service Fails to Access Secrets (in logs):** If your application starts but the logs show errors like "permission denied" when trying to access Secret Manager, it means the Toolbox service account is missing permissions.
* Ensure the `toolbox-identity` service account has the **Secret Manager Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.

View File

Binary image: 36 MiB before, 36 MiB after.

View File

Binary image: 298 KiB before, 298 KiB after.

View File

@@ -36,10 +36,10 @@ with the tool name, description, and available parameters.
### Invoking a Tool
1. Click on a Tool
1. Enter appropriate parameters in each parameter field
1. Click "Run Tool"
1. Done! Your results will appear in the response field
1. (Optional) Uncheck "Prettify JSON" to format the response as plain text
2. Enter appropriate parameters in each parameter field
3. Click "Run Tool"
4. Done! Your results will appear in the response field
5. (Optional) Uncheck "Prettify JSON" to format the response as plain text
![Run Tool Demo GIF](./run-tool.gif)
@@ -78,16 +78,13 @@ button and modal described above. The key should be the name of your AuthService
your tool configuration file, suffixed with `_token`. The value should be your ID token as a string.
1. Select a tool that requires [authenticated parameters]()
1. The auth parameter's text field is greyed out. This is because it cannot be entered manually and will
2. The auth parameter's text field is greyed out. This is because it cannot be entered manually and will
be parsed from the resolved auth token
1. To update request headers with the token, select "Edit Headers"
1. (Optional) If you wish to manually edit the header, check out the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving the ID token
1. To edit the header automatically, click the "Auto Setup" button that is associated with your Auth Profile
1. Enter the Client ID defined in your tools configuration file
1. Click "Continue"
1. Click "Sign in With Google" and login with your associated google account. This should automatically populate the header text area with your token
1. Click "Save"
1. Click "Run Tool"
3. To update request headers with the token, select "Edit Headers"
4. Checkout the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving ID token
5. Paste the request header
6. Click "Save"
7. Click "Run Tool"
```json
{

View File

Binary image: 58 KiB before, 58 KiB after.

View File

Binary image: 59 KiB before, 59 KiB after.

View File

Binary image: 5.4 MiB before, 5.4 MiB after.

View File

Binary image: 269 KiB before, 269 KiB after.

View File

Binary image: 136 KiB before, 136 KiB after.

View File

@@ -1,8 +0,0 @@
---
title: "Reference"
type: docs
weight: 7
description: >
This section contains reference documentation.
---

View File

@@ -1,75 +0,0 @@
---
title: "CLI"
type: docs
weight: 1
description: >
This page describes the `toolbox` command-line options.
---
## Reference
| Flag (Short) | Flag (Long) | Description | Default |
|---|---|---|---|
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
| `-h` | `--help` | help for toolbox | |
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| `-v` | `--version` | version for toolbox | |
## Examples
### Transport Configuration
**Server Settings:**
- `--address`, `-a`: Server listening address (default: "127.0.0.1")
- `--port`, `-p`: Server listening port (default: 5000)
**STDIO:**
- `--stdio`: Run in MCP STDIO mode instead of HTTP server
#### Usage Examples
```bash
# Basic server with custom port configuration
./toolbox --tools-file "tools.yaml" --port 8080
```
### Tool Configuration Sources
The CLI supports multiple mutually exclusive ways to specify tool configurations:
**Single File:** (default)
- `--tools-file`: Path to a single YAML configuration file (default: `tools.yaml`)
**Multiple Files:**
- `--tools-files`: Comma-separated list of YAML files to merge
**Directory:**
- `--tools-folder`: Directory containing YAML files to load and merge
**Prebuilt Configurations:**
- `--prebuilt`: Use predefined configurations for specific database types (e.g., 'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values.
{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags, preventing simultaneous use of `--prebuilt` with file-based options, and ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is used at a time.
{{< /notice >}}
### Hot Reload
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
### Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets with features such as authorized parameters. To learn more, visit [Toolbox UI](../how-to/toolbox-ui/index.md).
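A typical local invocation might look like this (the tools file name is a placeholder):
```bash
# Start the HTTP server with the interactive UI enabled
./toolbox --tools-file "tools.yaml" --ui
```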

View File

@@ -1,263 +0,0 @@
---
title: "Prebuilt Tools"
type: docs
weight: 1
description: >
This page lists all the prebuilt tools available.
---
Prebuilt tools are reusable, pre-packaged toolsets that are designed to extend the capabilities of agents. These tools are built to be generic and adaptable, allowing developers to interact with and take action on databases.
See the guide [Connect from your IDE](../how-to/connect-ide/_index.md) for details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
## AlloyDB Postgres
* `--prebuilt` value: `alloydb-postgres`
* **Environment Variables:**
* `ALLOYDB_POSTGRES_PROJECT`: The GCP project ID.
* `ALLOYDB_POSTGRES_REGION`: The region of your AlloyDB instance.
* `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
* `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
* `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
* `ALLOYDB_POSTGRES_USER`: The database username.
* `ALLOYDB_POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
* **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
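As a sketch of how prebuilt configurations are launched, the environment variables above are exported before starting the server (all values below are placeholders):
```bash
export ALLOYDB_POSTGRES_PROJECT="my-project-id"
export ALLOYDB_POSTGRES_REGION="us-central1"
export ALLOYDB_POSTGRES_CLUSTER="my-cluster"
export ALLOYDB_POSTGRES_INSTANCE="my-instance"
export ALLOYDB_POSTGRES_DATABASE="postgres"
export ALLOYDB_POSTGRES_USER="my-user"
export ALLOYDB_POSTGRES_PASSWORD="my-password"
./toolbox --prebuilt alloydb-postgres
```
The other prebuilt configurations on this page follow the same pattern with their respective environment variables.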
## AlloyDB Postgres Admin
* `--prebuilt` value: `alloydb-postgres-admin`
* **Environment Variables:**
* `API_KEY`: Your API key for the AlloyDB API.
* **Permissions:**
* **AlloyDB Admin** (`roles/alloydb.admin`) IAM role is required on the project.
* **Tools:**
* `alloydb-create-cluster`: Creates a new AlloyDB cluster.
* `alloydb-operations-get`: Polls the operations API to track the status of long-running operations.
* `alloydb-create-instance`: Creates a new AlloyDB instance within a cluster.
* `alloydb-list-clusters`: Lists all AlloyDB clusters in a project.
* `alloydb-list-instances`: Lists all instances within an AlloyDB cluster.
* `alloydb-list-users`: Lists all database users within an AlloyDB cluster.
* `alloydb-create-user`: Creates a new database user in an AlloyDB cluster.
## BigQuery
* `--prebuilt` value: `bigquery`
* **Environment Variables:**
* `BIGQUERY_PROJECT`: The GCP project ID.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view metadata.
* **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view all datasets.
* **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or modify datasets and tables.
* **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the conversational analytics API.
* **Tools:**
* `ask_data_insights`: Use this tool to perform data analysis, get insights, or answer complex questions about the contents of specific BigQuery tables. For more information on required roles, API setup, and IAM configuration, see the setup and authentication section of the [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
* `execute_sql`: Executes a SQL statement.
* `forecast`: Use this tool to forecast time series data.
* `get_dataset_info`: Gets dataset metadata.
* `get_table_info`: Gets table metadata.
* `list_dataset_ids`: Lists datasets.
* `list_table_ids`: Lists tables.
## Cloud SQL for MySQL
* `--prebuilt` value: `cloud-sql-mysql`
* **Environment Variables:**
* `CLOUD_SQL_MYSQL_PROJECT`: The GCP project ID.
* `CLOUD_SQL_MYSQL_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_MYSQL_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_MYSQL_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_MYSQL_USER`: The database username.
* `CLOUD_SQL_MYSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Cloud SQL for PostgreSQL
* `--prebuilt` value: `cloud-sql-postgres`
* **Environment Variables:**
* `CLOUD_SQL_POSTGRES_PROJECT`: The GCP project ID.
* `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_POSTGRES_USER`: The database username.
* `CLOUD_SQL_POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Cloud SQL for SQL Server
* `--prebuilt` value: `cloud-sql-mssql`
* **Environment Variables:**
* `CLOUD_SQL_MSSQL_PROJECT`: The GCP project ID.
* `CLOUD_SQL_MSSQL_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_MSSQL_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_MSSQL_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
* `CLOUD_SQL_MSSQL_USER`: The database username.
* `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Dataplex
* `--prebuilt` value: `dataplex`
* **Environment Variables:**
* `DATAPLEX_PROJECT`: The GCP project ID.
* **Permissions:**
* **Dataplex Reader** (`roles/dataplex.viewer`) to search and look up entries.
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
* **Tools:**
* `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
* `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex Catalog.
* `dataplex_search_aspect_types`: Finds aspect types relevant to the query.
## Firestore
* `--prebuilt` value: `firestore`
* **Environment Variables:**
* `FIRESTORE_PROJECT`: The GCP project ID.
* `FIRESTORE_DATABASE`: The Firestore database ID.
* **Permissions:**
* **Cloud Datastore User** (`roles/datastore.user`) to get documents, list collections, and query collections.
* **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and validate Firestore rules.
* **Tools:**
* `firestore-get-documents`: Gets multiple documents from Firestore by their paths.
* `firestore-list-collections`: Lists Firestore collections for a given parent path.
* `firestore-delete-documents`: Deletes multiple documents from Firestore.
* `firestore-query-collection`: Retrieves one or more Firestore documents from a collection.
* `firestore-get-rules`: Retrieves the active Firestore security rules.
* `firestore-validate-rules`: Checks the provided Firestore Rules source for syntax and validation errors.
## Looker
* `--prebuilt` value: `looker`
* **Environment Variables:**
* `LOOKER_BASE_URL`: The URL of your Looker instance.
* `LOOKER_CLIENT_ID`: The client ID for the Looker API.
* `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
* `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
* **Permissions:**
* A Looker account with permissions to access the desired models, explores, and data is required.
* **Tools:**
* `get_models`: Retrieves the list of LookML models.
* `get_explores`: Retrieves the list of explores in a model.
* `get_dimensions`: Retrieves the list of dimensions in an explore.
* `get_measures`: Retrieves the list of measures in an explore.
* `get_filters`: Retrieves the list of filters in an explore.
* `get_parameters`: Retrieves the list of parameters in an explore.
* `query`: Runs a query against the LookML model.
* `query_sql`: Generates the SQL for a query.
* `query_url`: Generates a URL for a query in Looker.
* `get_looks`: Searches for saved looks.
* `run_look`: Runs the query associated with a look.
* `make_look`: Creates a new look.
* `get_dashboards`: Searches for saved dashboards.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
## Microsoft SQL Server
* `--prebuilt` value: `mssql`
* **Environment Variables:**
* `MSSQL_HOST`: The hostname or IP address of the SQL Server instance.
* `MSSQL_PORT`: The port number for the SQL Server instance.
* `MSSQL_DATABASE`: The name of the database to connect to.
* `MSSQL_USER`: The database username.
* `MSSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## MySQL
* `--prebuilt` value: `mysql`
* **Environment Variables:**
* `MYSQL_HOST`: The hostname or IP address of the MySQL server.
* `MYSQL_PORT`: The port number for the MySQL server.
* `MYSQL_DATABASE`: The name of the database to connect to.
* `MYSQL_USER`: The database username.
* `MYSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## OceanBase
* `--prebuilt` value: `oceanbase`
* **Environment Variables:**
* `OCEANBASE_HOST`: The hostname or IP address of the OceanBase server.
* `OCEANBASE_PORT`: The port number for the OceanBase server.
* `OCEANBASE_DATABASE`: The name of the database to connect to.
* `OCEANBASE_USER`: The database username.
* `OCEANBASE_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## PostgreSQL
* `--prebuilt` value: `postgres`
* **Environment Variables:**
* `POSTGRES_HOST`: The hostname or IP address of the PostgreSQL server.
* `POSTGRES_PORT`: The port number for the PostgreSQL server.
* `POSTGRES_DATABASE`: The name of the database to connect to.
* `POSTGRES_USER`: The database username.
* `POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Spanner (GoogleSQL dialect)
* `--prebuilt` value: `spanner`
* **Environment Variables:**
* `SPANNER_PROJECT`: The GCP project ID.
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query.
* `execute_sql_dql`: Executes a DQL SQL query.
* `list_tables`: Lists tables in the database.
## Spanner (PostgreSQL dialect)
* `--prebuilt` value: `spanner-postgres`
* **Environment Variables:**
* `SPANNER_PROJECT`: The GCP project ID.
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
* `list_tables`: Lists tables in the database.

View File

@@ -68,10 +68,6 @@ parameter when loading tools, or the `add_auth_token_getter`() /
### Specifying tokens during load
#### Python
Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
import asyncio
@@ -139,69 +135,8 @@ if __name__ == "__main__":
asyncio.run(main()){{< /tab >}}
{{< /tabpane >}}
#### Javascript/Typescript
Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).
```javascript
import { ToolboxClient } from '@toolbox-sdk/core';
async function getAuthToken() {
// ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
// This example just returns a placeholder. Replace with your actual token retrieval.
return "YOUR_ID_TOKEN" // Placeholder
}
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
const authTool = await client.loadTool("my-tool", {"my_auth_app_1": getAuthToken});
const result = await authTool({param:"value"});
console.log(result);
```
#### Go
Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).
```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
import "fmt"
func getAuthToken() string {
// ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
// This example just returns a placeholder. Replace with your actual token retrieval.
return "YOUR_ID_TOKEN" // Placeholder
}
func main() {
URL := 'http://127.0.0.1:5000'
client, err := core.NewToolboxClient(URL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
dynamicTokenSource := core.NewCustomTokenSource(getAuthToken)
authTool, err := client.LoadTool(
"my-tool",
ctx,
core.WithAuthTokenSource("my_auth_app_1", dynamicTokenSource))
if err != nil {
log.Fatalf("Failed to load tool: %v", err)
}
inputs := map[string]any{"param": "value"}
result, err := authTool.Invoke(ctx, inputs)
if err != nil {
log.Fatalf("Failed to invoke tool: %v", err)
}
fmt.Println(result)
}
```
### Specifying tokens for existing tools
#### Python
Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
tools = await toolbox.load_toolset()
@@ -247,57 +182,4 @@ authorized_tool = tools[0].add_auth_token_getters({
{{< /tab >}}
{{< /tabpane >}}
#### Javascript/Typescript
Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).
```javascript
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
let tool = await client.loadTool("my-tool")
// For a single token
const authorizedTool = tool.addAuthTokenGetter("my_auth", getAuthToken);
// OR, if multiple tokens are needed
const multiAuthTool = tool.addAuthTokenGetters({
"my_auth_1": getAuthToken1,
"my_auth_2": getAuthToken2,
})
```
#### Go
Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).
```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
func main() {
URL := 'http://127.0.0.1:5000'
client, err := core.NewToolboxClient(URL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
tool, err := client.LoadTool("my-tool", ctx))
if err != nil {
log.Fatalf("Failed to load tool: %v", err)
}
dynamicTokenSource1 := core.NewCustomTokenSource(getAuthToken1)
dynamicTokenSource2 := core.NewCustomTokenSource(getAuthToken1)
// For a single token
authTool, err := tool.ToolFrom(
core.WithAuthTokenSource("my-auth", dynamicTokenSource),
)
// OR, if multiple tokens are needed
authTool, err := tool.ToolFrom(
core.WithAuthTokenSource("my-auth_1", dynamicTokenSource1),
core.WithAuthTokenSource("my-auth_2", dynamicTokenSource2),
)
}
```
## Kinds of Auth Services

View File

@@ -33,7 +33,7 @@ ID.
When using [Authenticated Parameters][auth-params], any [claim provided by the
id-token][provided-claims] can be used for the parameter.
[auth-params]: ../tools/#authenticated-parameters
[auth-params]: ../tools/#authenticated-parameters
[provided-claims]:
https://developers.google.com/identity/openid-connect/openid-connect#obtaininguserprofileinformation

View File

@@ -33,14 +33,6 @@ cluster][alloydb-free-trial].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in AlloyDB Postgres.
### Pre-built Configurations
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
Connect your IDE to AlloyDB using Toolbox.
- [AlloyDB Admin API using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_admin_mcp/)
Create your AlloyDB database with MCP Toolbox.
## Requirements
### IAM Permissions

View File

@@ -36,15 +36,12 @@ avoiding full table scans or complex filters.
## Available Tools
- [`bigquery-conversational-analytics`](../tools/bigquery/bigquery-conversational-analytics.md)
Allows conversational interaction with a BigQuery source.
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
- [`bigquery-execute-sql`](../tools/bigquery/bigquery-execute-sql.md)
Execute structured queries using parameters.
- [`bigquery-forecast`](../tools/bigquery/bigquery-forecast.md)
Forecasts time series data in BigQuery.
- [`bigquery-get-dataset-info`](../tools/bigquery/bigquery-get-dataset-info.md)
Retrieve metadata for a specific dataset.
@@ -57,78 +54,40 @@ avoiding full table scans or complex filters.
- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)
List tables in a given dataset.
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
### Pre-built Configurations
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
Connect your IDE to BigQuery using Toolbox.
## Requirements
### IAM Permissions
BigQuery uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to BigQuery resources like projects, datasets, and tables.
Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize
and authenticate when interacting with [BigQuery][bigquery-docs].
### Authentication via Application Default Credentials (ADC)
By **default**, Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when interacting with [BigQuery][bigquery-docs].
When using this method, you need to ensure the IAM identity associated with your
ADC (such as a service account) has the correct permissions for the queries you
intend to run. Common roles include `roles/bigquery.user` (which includes
permissions to run jobs and read data) or `roles/bigquery.dataViewer`.
Follow this [guide][set-adc] to set up your ADC.
### Authentication via User's OAuth Access Token
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
OAuth access token for authentication. This token is parsed from the
`Authorization` header passed in with the tool invocation request. This method
allows Toolbox to make queries to [BigQuery][bigquery-docs] on behalf of the
client or the end-user.
When using this on-behalf-of authentication, you must ensure that the
identity used has been granted the correct IAM permissions. Currently,
this option is only supported by the following BigQuery tools:
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the queries
you intend to run. Common roles include `roles/bigquery.user` (which includes
permissions to run jobs and read data) or `roles/bigquery.dataViewer`. See
[Introduction to BigQuery IAM][grant-permissions] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/bigquery/docs/access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[grant-permissions]: https://cloud.google.com/bigquery/docs/access-control
## Example
Initialize a BigQuery source that uses ADC:
```yaml
sources:
my-bigquery-source:
kind: "bigquery"
project: "my-project-id"
# location: "US" # Optional: Specifies the location for query jobs.
```
Initialize a BigQuery source that uses the client's access token:
```yaml
sources:
my-bigquery-client-auth-source:
kind: "bigquery"
project: "my-project-id"
useClientOAuth: true
# location: "US" # Optional: Specifies the location for query jobs.
```
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery". |
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. The default behavior is for it to be executed in the US multi-region |

View File

@@ -1,91 +0,0 @@
---
title: "ClickHouse"
type: docs
weight: 1
description: >
ClickHouse is an open-source, column-oriented OLAP database.
---
## About
[ClickHouse][clickhouse-docs] is a fast, open-source, column-oriented database management system built for online analytical processing (OLAP).
[clickhouse-docs]: https://clickhouse.com/docs
## Available Tools
- [`clickhouse-execute-sql`](../tools/clickhouse/clickhouse-execute-sql.md)
Execute parameterized SQL queries in ClickHouse with query logging.
- [`clickhouse-sql`](../tools/clickhouse/clickhouse-sql.md)
Execute SQL queries as prepared statements in ClickHouse.
## Requirements
### Database User
This source uses standard ClickHouse authentication. You will need to [create a
ClickHouse user][clickhouse-users] (or use [ClickHouse Cloud][clickhouse-cloud]) to connect to the database. The user
should have appropriate permissions for the operations you plan to perform.
[clickhouse-cloud]: https://clickhouse.com/docs/getting-started/quick-start/cloud#connect-with-your-app
[clickhouse-users]: https://clickhouse.com/docs/en/sql-reference/statements/create/user
### Network Access
ClickHouse supports multiple protocols:
- **HTTPS protocol** (default port 8443) - Secure HTTP access (default)
- **HTTP protocol** (default port 8123) - Good for web-based access
## Example
### Secure Connection Example
```yaml
sources:
secure-clickhouse-source:
kind: clickhouse
host: clickhouse.example.com
port: "8443"
database: analytics
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: https
secure: true
```
### HTTP Protocol Example
```yaml
sources:
http-clickhouse-source:
kind: clickhouse
host: localhost
port: "8123"
database: logs
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: http
secure: false
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------------------------------------|
| kind | string | true | Must be "clickhouse". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). |
| user | string | true | Name of the ClickHouse user to connect as (e.g. "analytics_user"). |
| password | string | false | Password of the ClickHouse user (e.g. "my-password"). |
| protocol | string | false | Connection protocol: "https" (default) or "http". |
| secure | boolean | false | Whether to use a secure connection (TLS). Default: false. |

View File

@@ -27,11 +27,6 @@ to a database by following these instructions][csql-mssql-connect].
- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)
Run parameterized SQL Server queries in Cloud SQL for SQL Server.
### Pre-built Configurations
- [Cloud SQL for SQL Server using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mssql_mcp/)
Connect your IDE to Cloud SQL for SQL Server using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -28,11 +28,6 @@ to a database by following these instructions][csql-mysql-quickstart].
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in Cloud SQL for MySQL.
### Pre-built Configurations
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
Connect your IDE to Cloud SQL for MySQL using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -28,12 +28,6 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
### Pre-built Configurations
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
Connect your IDE to Cloud SQL for Postgres using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -22,17 +22,13 @@ allowing tools to execute SQL queries against it.
sources:
my-couchbase-instance:
kind: couchbase
connectionString: couchbase://localhost
connectionString: couchbase://localhost:8091
bucket: travel-sample
scope: inventory
username: Administrator
password: password
```
{{< notice note >}}
For more details about alternate addresses and custom ports refer to [Managing Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connections.html).
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |

View File

@@ -218,45 +218,22 @@ Search syntax supports the following qualifiers:
- "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
- "type=TYPE" - Matches resources of a specific entry type or its type alias.
- "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. It supports same syntax as `name` predicate.
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. The parent path is a fully_qualified_name of the parent resource.
- "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
- "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
- "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
- "createtime" -
Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01.
- "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.
### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.
- `aspect:x` - Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
- `aspect=x` - Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
- `aspect:xOPERATORvalue` - Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME
- String: = (exact match) and : (substring)
- All number types: =, :, <, >, <=, >=, =>, =<
- Enum: =
- Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- Boolean: =
Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
- aspect:example-project.us-central1.employee-info.is-enrolled=true
- aspect:example-project.us-central1.employee=true
- aspect:employee=true
For example, you can use the following filters:
- dataplex-types.global.bigquery-table.type={BIGLAKE_TABLE, BIGLAKE_OBJECT_TABLE, EXTERNAL_TABLE, TABLE}
- dataplex-types.global.storage.type={STRUCTURED, UNSTRUCTURED}
- "fully_qualified_name:x" - Matches x as a substring of fully_qualified_name.
- "fully_qualified_name=x" - Matches x as fully_qualified_name.
### Logical operators
A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
Logical AND and logical OR are supported. For example, foo OR bar.
You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.
Logical operators aren't case-sensitive. For example, both or and OR are acceptable.
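As an illustration, the following query combines an exact-match predicate, an implied AND, and a negated predicate (the values are placeholders):
```
system=bigquery location=us-central1 -name:test
```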
### Request
1. Always try to rewrite the prompt using search syntax.
@@ -310,7 +287,7 @@ Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or`
## Tool: dataplex_lookup_entry
### Request
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the `aspect_types` parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool.
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the `aspect_types` parameter.
2. If you do not know the name of the entry, use `dataplex_search_entries` tool
### Response
1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
@@ -321,4 +298,4 @@ Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or`
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex". |
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|

View File

@@ -0,0 +1,73 @@
---
title: DuckDB
linkTitle: DuckDB
type: docs
weight: 1
description: >
DuckDB is an in-process SQL OLAP database management system designed for analytical query processing.
---
## About
[DuckDB](https://duckdb.org/) is an embedded analytical database management system that runs in-process with the client application. It is optimized for analytical workloads, providing high performance for complex queries with minimal setup.
DuckDB has the following notable characteristics:
- In-process, serverless database engine
- Supports complex SQL queries for analytical processing
- Can operate on in-memory or persistent storage
- Zero-configuration - no external dependencies or server setup required
- Highly optimized for columnar data storage and query execution
For more details, refer to the [DuckDB Documentation](https://duckdb.org/).
## Available Tools
- [`duckdb-sql`](../tools/duckdb/duckdb-sql.md)
Execute pre-defined prepared SQL queries in DuckDB.
## Requirements
### Database File
To use DuckDB, you can either:
- Specify a file path for a persistent database stored on the filesystem
- Omit the file path to use an in-memory database
## Example
For a persistent DuckDB database:
```yaml
sources:
my-duckdb:
kind: "duckdb"
dbFilePath: "/path/to/database.db"
configuration:
memory_limit: "2GB"
threads: "4"
```
For an in-memory DuckDB database:
```yaml
sources:
my-duckdb-memory:
kind: "duckdb"
```
## Reference
### Configuration Fields
| **field** | **type** | **required** | **description** |
|-------------------|:-----------------:|:------------:|---------------------------------------------------------------------------------|
| kind | string | true | Must be "duckdb". |
| dbFilePath | string | false | Path to the DuckDB database file. Omit for an in-memory database. |
| configuration | map[string]string | false | Additional DuckDB configuration options (e.g., `memory_limit`, `threads`). |
For a complete list of available configuration options, refer to the [DuckDB Configuration Documentation](https://duckdb.org/docs/stable/configuration/overview.html#local-configuration-options).
For more details on the Go implementation, see the [go-duckdb package documentation](https://pkg.go.dev/github.com/scottlepp/go-duckdb#section-readme).
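As a sketch of how the source pairs with the available tool, a hypothetical `duckdb-sql` definition might reference the source above (table and column names are illustrative):
```yaml
tools:
  daily_order_counts:
    kind: duckdb-sql
    source: my-duckdb
    description: Returns the number of orders per day.
    statement: SELECT order_date, COUNT(*) AS orders FROM orders GROUP BY order_date;
```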

View File

@@ -1,59 +0,0 @@
---
title: "Firebird"
type: docs
weight: 1
description: >
Firebird is a powerful, cross-platform, and open-source relational database.
---
## About
[Firebird][fb-docs] is a relational database management system offering many ANSI SQL standard features that runs on Linux, Windows, and a variety of Unix platforms. It is known for its small footprint, powerful features, and easy maintenance.
[fb-docs]: https://firebirdsql.org/
## Available Tools
- [`firebird-sql`](../tools/firebird/firebird-sql.md)
Execute SQL queries as prepared statements in Firebird.
- [`firebird-execute-sql`](../tools/firebird/firebird-execute-sql.md)
Run parameterized SQL statements in Firebird.
## Requirements
### Database User
This source uses standard authentication. You will need to [create a Firebird user][fb-users] to login to the database with.
[fb-users]: https://firebirdsql.org/refdocs/langrefupd25-sql-create-user.html
## Example
```yaml
sources:
my_firebird_db:
kind: firebird
host: "localhost"
port: 3050
database: "/path/to/your/database.fdb"
user: ${FIREBIRD_USER}
password: ${FIREBIRD_PASS}
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "firebird". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "3050") |
| database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |
| user | string | true | Name of the Firebird user to connect as (e.g. "SYSDBA"). |
| password | string | true | Password of the Firebird user (e.g. "masterkey"). |

View File

@@ -39,16 +39,15 @@ sources:
The Looker base url will look like "https://looker.example.com", don't include
a trailing "/". In some cases, especially if your Looker is deployed
on-premises, you may need to add the API port number like
on-premises, you may need to add the API port number like
"https://looker.example.com:19999".
Verify ssl should almost always be "true" (all lower case) unless you are using
a self-signed ssl certificate for the Looker server. Anything other than "true"
will be interpreted as false.
will be interpreted as false.
The client id and client secret are seemingly random character sequences
assigned by the Looker server. If you are using Looker OAuth you don't need
these settings.
assigned by the Looker server.
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
@@ -57,15 +56,11 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
| -------------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
| kind | string | true | Must be "looker". |
| base_url             | string   | true         | The URL of your Looker server (no trailing "/").                                            |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| **field** | **type** | **required** | **description** |
| ------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
| kind | string | true | Must be "looker". |
| base_url      | string   | true         | The URL of your Looker server (no trailing "/").                                            |
| client_id | string | true | The client id assigned by Looker. |
| client_secret | string | true | The client secret assigned by Looker. |
| verify_ssl | string | true | Whether to check the ssl certificate of the server. |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |

View File

@@ -20,8 +20,9 @@ flexible, JSON-like documents, making it easy to develop and scale applications.
sources:
my-mongodb:
kind: mongodb
uri: "mongodb+srv://username:password@host.mongodb.net"
uri: "mongodb+srv://username:password@host.mongodb.net"
database: sample_mflix
```
## Reference
@@ -30,3 +31,4 @@ sources:
|-----------|:--------:|:------------:|-------------------------------------------------------------------|
| kind | string | true | Must be "mongodb". |
| uri | string | true | connection string to connect to MongoDB |
| database | string | true | Name of the mongodb database to connect to (e.g. "sample_mflix"). |

View File

@@ -42,9 +42,6 @@ sources:
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# Optional TLS and other driver parameters. For example, enable preferred TLS:
# queryParams:
# tls: preferred
queryTimeout: 30s # Optional: query timeout duration
```
@@ -64,4 +61,3 @@ instead of hardcoding your secrets into the configuration file.
| user | string | true | Name of the MySQL user to connect as (e.g. "my-mysql-user"). |
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
| queryParams | map<string,string> | false | Arbitrary DSN parameters passed to the driver (e.g. `tls: preferred`, `charset: utf8mb4`). Useful for enabling TLS or other connection options. |

View File

@@ -1,72 +0,0 @@
---
title: "OceanBase"
type: docs
weight: 1
description: >
OceanBase is a distributed relational database that provides high availability, scalability, and compatibility with MySQL.
---
## About
[OceanBase][oceanbase-docs] is a distributed relational database management system (RDBMS) that provides high availability, scalability, and strong consistency. It's designed to handle large-scale data processing and is compatible with MySQL, making it easy for developers to migrate from MySQL to OceanBase.
[oceanbase-docs]: https://www.oceanbase.com/
## Requirements
### Database User
This source only uses standard authentication. You will need to create an OceanBase user to login to the database with. OceanBase supports MySQL-compatible user management syntax.
### Network Connectivity
Ensure that your application can connect to the OceanBase cluster. OceanBase typically runs on ports 2881 (for MySQL protocol) or 3881 (for MySQL protocol with SSL).
## Example
```yaml
sources:
my-oceanbase-source:
kind: oceanbase
host: 127.0.0.1
port: 2881
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
queryTimeout: 30s # Optional: query timeout duration
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: |-------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oceanbase". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "2881"). |
| database | string | true | Name of the OceanBase database to connect to (e.g. "my_db"). |
| user | string | true | Name of the OceanBase user to connect as (e.g. "my-oceanbase-user"). |
| password | string | true | Password of the OceanBase user (e.g. "my-password"). |
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
## Features
### MySQL Compatibility
OceanBase is highly compatible with MySQL, supporting most MySQL SQL syntax, data types, and functions. This makes it easy to migrate existing MySQL applications to OceanBase.
### High Availability
OceanBase provides automatic failover and data replication across multiple nodes, ensuring high availability and data durability.
### Scalability
OceanBase can scale horizontally by adding more nodes to the cluster, making it suitable for large-scale applications.
### Strong Consistency
OceanBase provides strong consistency guarantees, ensuring that all transactions are ACID compliant.

View File

@@ -23,11 +23,6 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
### Pre-built Configurations
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
Connect your IDE to PostgreSQL using Toolbox.
## Requirements
### Database User
@@ -57,12 +52,11 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "postgres". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "5432") |
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |
| user | string | true | Name of the Postgres user to connect as (e.g. "my-pg-user"). |
| password | string | true | Password of the Postgres user (e.g. "my-password"). |
| queryParams | map[string]string | false | Raw query to be added to the db connection string. |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "postgres". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "5432") |
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |
| user | string | true | Name of the Postgres user to connect as (e.g. "my-pg-user"). |
| password | string | true | Password of the Postgres user (e.g. "my-password"). |

View File

@@ -4,13 +4,13 @@ linkTitle: "Redis"
type: docs
weight: 1
description: >
Redis is a in-memory data structure store.
Redis is an open-source, in-memory data structure store.
---
## About
Redis is a in-memory data structure store, used as a database,
Redis is an open-source, in-memory data structure store, used as a database,
cache, and message broker. It supports data structures such as strings, hashes,
lists, sets, sorted sets with range queries, bitmaps, hyperloglogs, and
geospatial indexes with radius queries.

View File

@@ -31,11 +31,6 @@ the Google Cloud console][spanner-quickstart].
- [`spanner-execute-sql`](../tools/spanner/spanner-execute-sql.md)
Run structured and parameterized queries on Spanner.
### Pre-built Configurations
- [Spanner using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/spanner_mcp/)
Connect your IDE to Spanner using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -61,7 +61,7 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "sqlite". |
| kind      | string   | true         | Must be "sqlite".                                                                                                     |
| database | string | true | Path to SQLite database file, or ":memory:" for an in-memory database. |
### Connection Properties

View File

@@ -1,62 +0,0 @@
---
title: "Trino"
type: docs
weight: 1
description: >
Trino is a distributed SQL query engine for big data analytics.
---
## About
[Trino][trino-docs] is a distributed SQL query engine designed for fast analytic queries against data of any size. It allows you to query data where it lives, including Hive, Cassandra, relational databases or even proprietary data stores.
[trino-docs]: https://trino.io/docs/
## Available Tools
- [`trino-sql`](../tools/trino/trino-sql.md)
Execute parameterized SQL queries against Trino.
- [`trino-execute-sql`](../tools/trino/trino-execute-sql.md)
Execute arbitrary SQL queries against Trino.
## Requirements
### Trino Cluster
You need access to a running Trino cluster with appropriate user permissions for the catalogs and schemas you want to query.
## Example
```yaml
sources:
my-trino-source:
kind: trino
host: trino.example.com
port: "8080"
user: ${TRINO_USER} # Optional for anonymous access
password: ${TRINO_PASSWORD} # Optional
catalog: hive
schema: default
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
| password | string | false | Password for basic authentication |
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
| schema | string | true | Default schema to use for queries (e.g. "default") |
| queryTimeout| string | false | Query timeout duration (e.g. "30m", "1h") |
| accessToken | string | false | JWT access token for authentication |
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |

View File

@@ -75,12 +75,6 @@ visible to the LLM.
[alloydb-psv]: https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview
{{< notice tip >}} Make sure to enable the `parameterized_views` extension before running this tool. You can do so by running this command in the AlloyDB studio:
```sql
CREATE EXTENSION IF NOT EXISTS parameterized_views;
```
{{< /notice >}}
## Example
```yaml
@@ -101,6 +95,7 @@ tools:
- name: my_google_service
field: email
```
## Reference
| **field** | **type** | **required** | **description** |

View File

@@ -1,54 +0,0 @@
---
title: "bigquery-conversational-analytics"
type: docs
weight: 1
description: >
A "bigquery-conversational-analytics" tool allows conversational interaction with a BigQuery source.
aliases:
- /resources/tools/bigquery-conversational-analytics
---
## About
A `bigquery-conversational-analytics` tool allows you to ask questions about your data in natural language.
This function takes a user's question (which can include conversational history for context)
and references to specific BigQuery tables, and sends them to a stateless conversational API.
The API uses a GenAI agent to understand the question, generate and execute SQL queries
and Python code, and formulate an answer. This function returns a detailed, sequential
log of this entire process, which includes any generated SQL or Python code, the data
retrieved, and the final text answer.
**Note**: This tool requires additional setup in your project. Please refer to the
official [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview)
for instructions.
It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
The tool takes the following input parameters:
* `user_query_with_context`: The user's question, potentially including conversation history and system instructions for context.
* `table_references`: A JSON string of a list of BigQuery tables to use as context. Each object in the list must contain `projectId`, `datasetId`, and `tableId`. Example: `'[{"projectId": "my-gcp-project", "datasetId": "my_dataset", "tableId": "my_table"}]'`
## Example
```yaml
tools:
ask_data_insights:
kind: bigquery-conversational-analytics
source: my-bigquery-source
description: |
Use this tool to perform data analysis, get insights, or answer complex
questions about the contents of specific BigQuery tables.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-conversational-analytics". |
| source | string | true | Name of the source for chat. |
| description | string                                     | true         | Description of the tool that is passed to the LLM.                                                 |

View File

@@ -15,9 +15,8 @@ It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-execute-sql` takes a required `sql` input parameter and runs the SQL
statement against the configured `source`. It also supports an optional `dry_run`
parameter to validate a query without executing it.
`bigquery-execute-sql` takes one input parameter `sql` and runs the sql
statement against the `source`.
## Example

View File

@@ -1,49 +0,0 @@
---
title: "bigquery-forecast"
type: docs
weight: 1
description: >
A "bigquery-forecast" tool forecasts time series data in BigQuery.
aliases:
- /resources/tools/bigquery-forecast
---
## About
A `bigquery-forecast` tool forecasts time series data in BigQuery.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-forecast` constructs and executes a `SELECT * FROM AI.FORECAST(...)` query based on the provided parameters:
- **history_data** (string, required): This specifies the source of the historical time series data. It can be either a fully qualified BigQuery table ID (e.g., my-project.my_dataset.my_table) or a SQL query that returns the data.
- **timestamp_col** (string, required): The name of the column in your history_data that contains the timestamps.
- **data_col** (string, required): The name of the column in your history_data that contains the numeric values to be forecasted.
- **id_cols** (array of strings, optional): If you are forecasting multiple time series at once (e.g., sales for different products), this parameter takes an array of column names that uniquely identify each series. It defaults to an empty array if not provided.
- **horizon** (integer, optional): The number of future time steps you want to predict. It defaults to 10 if not specified.
## Example
```yaml
tools:
forecast_tool:
kind: bigquery-forecast
source: my-bigquery-source
description: Use this tool to forecast time series data in BigQuery.
```
## Sample Prompt
You can use the following sample prompts to call this tool:
- Can you forecast the historical time series data in bigquery table `bqml_tutorial.google_analytic`? Use project_id `myproject`.
- What are the future `total_visits` in bigquery table `bqml_tutorial.google_analytic`?
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-forecast". |
| source | string | true | Name of the source the forecast tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -15,7 +15,7 @@ It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
`bigquery-get-dataset-info` takes a required `dataset` parameter to specify the dataset
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.

View File

@@ -77,7 +77,7 @@ tools:
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](..#template-parameters).
> [templateParameters](#template-parameters).
```yaml
tools:
@@ -107,7 +107,7 @@ tools:
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute on. |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](..#template-parameters) | false | List of [templateParameters](..#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
## Tips

View File

@@ -1,7 +0,0 @@
---
title: "ClickHouse"
type: docs
weight: 1
description: >
Tools for interacting with ClickHouse databases and tables.
---

View File

@@ -1,46 +0,0 @@
---
title: "clickhouse-execute-sql"
type: docs
weight: 1
description: >
A "clickhouse-execute-sql" tool executes a SQL statement against a ClickHouse
database.
aliases:
- /resources/tools/clickhouse-execute-sql
---
## About
A `clickhouse-execute-sql` tool executes a SQL statement against a ClickHouse
database. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
`clickhouse-execute-sql` takes one input parameter `sql` and runs the SQL
statement against the specified `source`. This tool includes query logging
capabilities for monitoring and debugging purposes.
> **Note:** This tool is intended for developer assistant workflows with
> human-in-the-loop and shouldn't be used for production agents.
## Example
```yaml
tools:
execute_sql_tool:
kind: clickhouse-execute-sql
source: my-clickhouse-instance
description: Use this tool to execute SQL statements against ClickHouse.
```
## Parameters
| **parameter** | **type** | **required** | **description** |
|---------------|:--------:|:------------:|----------------------------------------------------|
| sql | string | true | The SQL statement to execute against the database |
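For example, an invocation passes the statement in the `sql` field. A minimal sketch (the statement itself is illustrative):
```json
{
  "sql": "SELECT version()"
}
```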
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "clickhouse-execute-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
View File
@@ -1,81 +0,0 @@
---
title: "clickhouse-sql"
type: docs
weight: 2
description: >
A "clickhouse-sql" tool executes SQL queries as prepared statements in ClickHouse.
aliases:
- /resources/tools/clickhouse-sql
---
## About
A `clickhouse-sql` tool executes SQL queries as prepared statements against a
ClickHouse database. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
This tool supports both template parameters (for SQL statement customization)
and regular parameters (for prepared statement values), providing flexible
query execution capabilities.
## Example
```yaml
tools:
my_analytics_query:
kind: clickhouse-sql
source: my-clickhouse-instance
description: Get user analytics for a specific date range
statement: |
SELECT
user_id,
count(*) as event_count,
max(timestamp) as last_event
FROM events
WHERE date >= ? AND date <= ?
GROUP BY user_id
ORDER BY event_count DESC
LIMIT ?
parameters:
- name: start_date
description: Start date for the query (YYYY-MM-DD format)
- name: end_date
description: End date for the query (YYYY-MM-DD format)
- name: limit
description: Maximum number of results to return
```
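An invocation of this tool binds the declared parameters, in order, to the `?` placeholders in the statement. A sketch with illustrative values:
```json
{
  "start_date": "2024-01-01",
  "end_date": "2024-01-31",
  "limit": 100
}
```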
## Template Parameters Example
```yaml
tools:
flexible_table_query:
kind: clickhouse-sql
source: my-clickhouse-instance
description: Query any table with flexible columns
statement: |
SELECT {{columns}}
FROM {{table_name}}
WHERE created_date >= ?
LIMIT ?
templateParameters:
- name: columns
description: Comma-separated list of columns to select
- name: table_name
description: Name of the table to query
parameters:
- name: start_date
description: Start date filter
- name: limit
description: Maximum number of results
```
## Reference
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The SQL statement template to execute. |
| parameters | array of Parameter | false | Parameters for prepared statement values. |
| templateParameters | array of Parameter | false | Parameters for SQL statement template customization. |
View File
@@ -66,7 +66,7 @@ tools:
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](..#template-parameters).
> [templateParameters](#template-parameters).
```yaml
tools:
@@ -96,5 +96,5 @@ tools:
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the SQL statement. |
| templateParameters | [templateParameters](..#template-parameters) | false | List of [templateParameters](..#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
View File
@@ -1,62 +0,0 @@
---
title: "dataplex-search-aspect-types"
type: docs
weight: 1
description: >
A "dataplex-search-aspect-types" tool allows to to find aspect types relevant to the query.
aliases:
- /resources/tools/dataplex-search-aspect-types
---
## About
A `dataplex-search-aspect-types` tool allows you to fetch the metadata template of aspect types based on a search query.
It's compatible with the following sources:
- [dataplex](../../sources/dataplex.md)
`dataplex-search-aspect-types` optionally accepts the following parameters (an example invocation follows the list):
- `query` - Narrows down the search of aspect types to the value of this parameter. If not provided, it fetches all aspect types available to the user.
- `pageSize` - Number of returned aspect types in the search page. Defaults to `5`.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance (default), last_modified_timestamp, last_modified_timestamp asc.
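A hypothetical invocation combining these parameters (the query string is illustrative):
```json
{
  "query": "tables with PII aspects",
  "pageSize": 10,
  "orderBy": "relevance"
}
```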
## Requirements
### IAM Permissions
Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Dataplex resources. Toolbox will use your
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
interacting with [Dataplex][dataplex-docs].
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the tasks you
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
[dataplex-docs]: https://cloud.google.com/dataplex
## Example
```yaml
tools:
dataplex-search-aspect-types:
kind: dataplex-search-aspect-types
source: my-dataplex-source
description: Use this tool to find aspect types relevant to the query.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-search-aspect-types". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
View File
@@ -17,11 +17,22 @@ It's compatible with the following sources:
- [dataplex](../../sources/dataplex.md)
`dataplex-search-entries` takes a required `query` parameter based on which
entries are filtered and returned to the user. It also optionally accepts following parameters:
entries are filtered and returned to the user, and a required `name` parameter,
which is constructed using the source's project if the user does not provide it
explicitly and has the following format: projects/{project}/locations/global. It
also optionally accepts the following parameters:
- `pageSize` - Number of results in the search page. Defaults to `5`.
- `pageToken` - Page token received from a previous locations.searchEntries
call.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance
(default), last_modified_timestamp, last_modified_timestamp asc.
(default), last_modified_timestamp, last_modified_timestamp asc
- `semanticSearch` - Specifies whether the search should understand the meaning
and intent behind the query, rather than just matching keywords. Defaults to
`true`.
- `scope` - The scope under which the search should be operating. Since this
parameter is not exposed to the toolbox user, it defaults to the organization
where the project provided in name is located.
## Requirements
View File
@@ -0,0 +1,7 @@
---
title: "DuckDB"
type: docs
weight: 1
description: >
Tools that work with DuckDB Sources.
---
View File
@@ -0,0 +1,80 @@
---
title: "duckdb-sql"
type: docs
weight: 1
description: >
Execute SQL statements against a DuckDB database using the DuckDB SQL tools configuration.
aliases:
- /resources/tools/duckdb-sql
---
## About
A `duckdb-sql` tool executes a pre-defined SQL statement against a [DuckDB](https://duckdb.org/) database. It is compatible with any DuckDB source configuration as defined in the [DuckDB source documentation](../../sources/duckdb.md).
The specified SQL statement is executed as a prepared statement, and parameters are inserted according to their position: e.g., `$1` is the first parameter, `$2` is the second, and so on. If template parameters are included, they are resolved before execution of the prepared statement.
DuckDB's SQL dialect closely follows the conventions of the PostgreSQL dialect, with a few exceptions listed in the [DuckDB PostgreSQL Compatibility documentation](https://duckdb.org/docs/stable/sql/dialect/postgresql_compatibility.html). For an introduction to DuckDB's SQL dialect, refer to the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
### Concepts
DuckDB is a relational database management system (RDBMS). Data is stored in relations (tables), where each table is a named collection of rows. Each row in a table has the same set of named columns, each with a specific data type. Tables are stored within schemas, and a collection of schemas constitutes the entire database.
For more details, see the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
## Example
> **Note:** This tool uses parameterized queries to prevent SQL injections. Query parameters can be used as substitutes for arbitrary expressions but cannot be used for identifiers, column names, table names, or other parts of the query.
```yaml
tools:
search-users:
kind: duckdb-sql
source: my-duckdb
description: Search users by name and age
statement: SELECT * FROM users WHERE name LIKE $1 AND age >= $2
parameters:
- name: name
type: string
description: The name to search for
- name: min_age
type: integer
description: Minimum age
```
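Given the tool above, an invocation binds `name` to `$1` and `min_age` to `$2` in the order the parameters are declared. A sketch with illustrative values:
```json
{
  "name": "%Smith%",
  "min_age": 21
}
```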
## Example with Template Parameters
> **Note:** Template parameters allow direct modifications to the SQL statement, including identifiers, column names, and table names, which makes them more vulnerable to SQL injections. Using basic parameters (see above) is recommended for performance and safety. For more details, see the [templateParameters](../#template-parameters) section.
```yaml
tools:
list_table:
kind: duckdb-sql
source: my-duckdb
statement: |
SELECT * FROM {{.tableName}};
description: |
Use this tool to list all information from a specific table.
Example:
{{
"tableName": "flights",
}}
templateParameters:
- name: tableName
type: string
description: Table to select from
```
## Reference
### Configuration Fields
| **field** | **type** | **required** | **description** |
|--------------------|:-------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "duckdb-sql". |
| source | string | true | Name of the DuckDB source configuration (see [DuckDB source documentation](../../sources/duckdb.md)). |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The SQL statement to execute. |
| authRequired | []string | false | List of authentication requirements for the tool (if any). |
| parameters | [parameters](../#specifying-parameters) | false | List of parameters that will be inserted into the SQL statement |
| templateParameters | [templateParameters](../#template-parameters) | false | List of template parameters that will be inserted into the SQL statement before executing the prepared statement. |
View File
@@ -1,7 +0,0 @@
---
title: "Firebird"
type: docs
weight: 1
description: >
Tools that work with Firebird Sources.
---
View File
@@ -1,41 +0,0 @@
---
title: "firebird-execute-sql"
type: docs
weight: 1
description: >
A "firebird-execute-sql" tool executes a SQL statement against a Firebird
database.
aliases:
- /resources/tools/firebird-execute-sql
---
## About
A `firebird-execute-sql` tool executes a SQL statement against a Firebird
database. It's compatible with the following source:
- [firebird](../sources/firebird.md)
`firebird-execute-sql` takes one input parameter `sql` and runs the sql
statement against the `source`.
> **Note:** This tool is intended for developer assistant workflows with
> human-in-the-loop and shouldn't be used for production agents.
## Example
```yaml
tools:
execute_sql_tool:
kind: firebird-execute-sql
source: my_firebird_db
description: Use this tool to execute a SQL statement against the Firebird database.
```
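An invocation then supplies the statement in the `sql` parameter. A minimal sketch, using Firebird's single-row system table `RDB$DATABASE` so the query is valid on any database:
```json
{
  "sql": "SELECT 1 AS ok FROM RDB$DATABASE"
}
```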
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firebird-execute-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
View File
@@ -1,135 +0,0 @@
---
title: "firebird-sql"
type: docs
weight: 1
description: >
A "firebird-sql" tool executes a pre-defined SQL statement against a Firebird
database.
aliases:
- /resources/tools/firebird-sql
---
## About
A `firebird-sql` tool executes a pre-defined SQL statement against a Firebird
database. It's compatible with the following source:
- [firebird](../sources/firebird.md)
The specified SQL statement is executed as a [prepared statement][fb-prepare],
and supports both positional parameters (`?`) and named parameters (`:param_name`).
Parameters will be inserted according to their position or name. If template
parameters are included, they will be resolved before the execution of the
prepared statement.
[fb-prepare]: https://firebirdsql.org/refdocs/langrefupd25-psql-execstat.html
## Example
> **Note:** This tool uses parameterized queries to prevent SQL injections.
> Query parameters can be used as substitutes for arbitrary expressions.
> Parameters cannot be used as substitutes for identifiers, column names, table
> names, or other parts of the query.
```yaml
tools:
search_flights_by_number:
kind: firebird-sql
source: my_firebird_db
statement: |
SELECT * FROM flights
WHERE airline = ?
AND flight_number = ?
LIMIT 10
description: |
Use this tool to get information for a specific flight.
Takes an airline code and flight number and returns info on the flight.
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
An airline code is a two-character airline designator followed by a flight
number, which is a 1 to 4 digit number.
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
If the tool returns more than one option, choose the date closest to today.
Example:
{{
"airline": "CY",
"flight_number": "888",
}}
Example:
{{
"airline": "DL",
"flight_number": "1234",
}}
parameters:
- name: airline
type: string
description: Airline unique 2 letter identifier
- name: flight_number
type: string
description: 1 to 4 digit number
```
### Example with Named Parameters
```yaml
tools:
search_flights_by_airline:
kind: firebird-sql
source: my_firebird_db
statement: |
SELECT * FROM flights
WHERE airline = :airline
AND departure_date >= :start_date
AND departure_date <= :end_date
ORDER BY departure_date
description: |
Search for flights by airline within a date range using named parameters.
parameters:
- name: airline
type: string
description: Airline unique 2 letter identifier
- name: start_date
type: string
description: Start date in YYYY-MM-DD format
- name: end_date
type: string
description: End date in YYYY-MM-DD format
```
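With named parameters, values are bound by name rather than position, so a hypothetical invocation of the tool above looks like this (the dates are illustrative):
```json
{
  "airline": "DL",
  "start_date": "2025-01-01",
  "end_date": "2025-01-31"
}
```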
### Example with Template Parameters
> **Note:** This tool allows direct modifications to the SQL statement,
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](_index#template-parameters).
```yaml
tools:
list_table:
kind: firebird-sql
source: my_firebird_db
statement: |
SELECT * FROM {{.tableName}}
description: |
Use this tool to list all information from a specific table.
Example:
{{
"tableName": "flights",
}}
templateParameters:
- name: tableName
type: string
description: Table to select from
```
## Reference
| **field** | **type** | **required** | **description** |
|---------------------|:---------------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firebird-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute. |
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
View File
@@ -1,274 +0,0 @@
---
title: "firestore-add-documents"
type: docs
weight: 1
description: >
A "firestore-add-documents" tool adds document to a given collection path.
aliases:
- /resources/tools/firestore-add-documents
---
## Description
The `firestore-add-documents` tool allows you to add new documents to a Firestore collection. It supports all Firestore data types using Firestore's native JSON format. The tool automatically generates a unique document ID for each new document.
## Parameters
| Parameter | Type | Required | Description |
|-----------|------|----------|-------------|
| `collectionPath` | string | Yes | The path of the collection where the document will be added |
| `documentData` | map | Yes | The data to be added as a document to the given collection. Must use [Firestore's native JSON format](https://cloud.google.com/firestore/docs/reference/rest/Shared.Types/ArrayValue#Value) with typed values |
| `returnData` | boolean | No | If set to true, the output will include the data of the created document. Defaults to false to help avoid overloading the context |
## Output
The tool returns a map containing:
| Field | Type | Description |
|-------|------|-------------|
| `documentPath` | string | The full resource name of the created document (e.g., `projects/{projectId}/databases/{databaseId}/documents/{document_path}`) |
| `createTime` | string | The timestamp when the document was created |
| `documentData` | map | The data that was added (only included when `returnData` is true) |
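A successful call might therefore return something like the following sketch (the project, document ID, and timestamp are illustrative; `documentData` appears only when `returnData` is true):
```json
{
  "documentPath": "projects/my-project/databases/(default)/documents/companies/AbC123xYz",
  "createTime": "2025-01-07T10:00:05Z"
}
```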
## Data Type Format
The tool requires Firestore's native JSON format for document data. Each field must be wrapped with its type indicator:
### Basic Types
- **String**: `{"stringValue": "your string"}`
- **Integer**: `{"integerValue": "123"}` or `{"integerValue": 123}`
- **Double**: `{"doubleValue": 123.45}`
- **Boolean**: `{"booleanValue": true}`
- **Null**: `{"nullValue": null}`
- **Bytes**: `{"bytesValue": "base64EncodedString"}`
- **Timestamp**: `{"timestampValue": "2025-01-07T10:00:00Z"}` (RFC3339 format)
### Complex Types
- **GeoPoint**: `{"geoPointValue": {"latitude": 34.052235, "longitude": -118.243683}}`
- **Array**: `{"arrayValue": {"values": [{"stringValue": "item1"}, {"integerValue": "2"}]}}`
- **Map**: `{"mapValue": {"fields": {"key1": {"stringValue": "value1"}, "key2": {"booleanValue": true}}}}`
- **Reference**: `{"referenceValue": "collection/document"}`
## Examples
### Basic Document Creation
```yaml
tools:
add-company-doc:
kind: firestore-add-documents
source: my-firestore
description: Add a new company document
```
Usage:
```json
{
"collectionPath": "companies",
"documentData": {
"name": {
"stringValue": "Acme Corporation"
},
"establishmentDate": {
"timestampValue": "2000-01-15T10:30:00Z"
},
"location": {
"geoPointValue": {
"latitude": 34.052235,
"longitude": -118.243683
}
},
"active": {
"booleanValue": true
},
"employeeCount": {
"integerValue": "1500"
},
"annualRevenue": {
"doubleValue": 1234567.89
}
}
}
```
### With Nested Maps and Arrays
```json
{
"collectionPath": "companies",
"documentData": {
"name": {
"stringValue": "Tech Innovations Inc"
},
"contactInfo": {
"mapValue": {
"fields": {
"email": {
"stringValue": "info@techinnovations.com"
},
"phone": {
"stringValue": "+1-555-123-4567"
},
"address": {
"mapValue": {
"fields": {
"street": {
"stringValue": "123 Innovation Drive"
},
"city": {
"stringValue": "San Francisco"
},
"state": {
"stringValue": "CA"
},
"zipCode": {
"stringValue": "94105"
}
}
}
}
}
}
},
"products": {
"arrayValue": {
"values": [
{
"stringValue": "Product A"
},
{
"stringValue": "Product B"
},
{
"mapValue": {
"fields": {
"productName": {
"stringValue": "Product C Premium"
},
"version": {
"integerValue": "3"
},
"features": {
"arrayValue": {
"values": [
{
"stringValue": "Advanced Analytics"
},
{
"stringValue": "Real-time Sync"
}
]
}
}
}
}
}
]
}
}
},
"returnData": true
}
```
### Complete Example with All Data Types
```json
{
"collectionPath": "test-documents",
"documentData": {
"stringField": {
"stringValue": "Hello World"
},
"integerField": {
"integerValue": "42"
},
"doubleField": {
"doubleValue": 3.14159
},
"booleanField": {
"booleanValue": true
},
"nullField": {
"nullValue": null
},
"timestampField": {
"timestampValue": "2025-01-07T15:30:00Z"
},
"geoPointField": {
"geoPointValue": {
"latitude": 37.7749,
"longitude": -122.4194
}
},
"bytesField": {
"bytesValue": "SGVsbG8gV29ybGQh"
},
"arrayField": {
"arrayValue": {
"values": [
{
"stringValue": "item1"
},
{
"integerValue": "2"
},
{
"booleanValue": false
}
]
}
},
"mapField": {
"mapValue": {
"fields": {
"nestedString": {
"stringValue": "nested value"
},
"nestedNumber": {
"doubleValue": 99.99
}
}
}
}
}
}
```
## Authentication
The tool can be configured to require authentication:
```yaml
tools:
secure-add-docs:
kind: firestore-add-documents
source: prod-firestore
description: Add documents with authentication required
authRequired:
- google-oauth
- api-key
```
## Error Handling
Common errors include:
- Invalid collection path
- Missing or invalid document data
- Permission denied (if Firestore security rules block the operation)
- Invalid data type conversions
## Best Practices
1. **Always use typed values**: Every field must be wrapped with its appropriate type indicator (e.g., `{"stringValue": "text"}`)
2. **Integer values can be strings**: The tool accepts integer values as strings (e.g., `{"integerValue": "1500"}`)
3. **Use returnData sparingly**: Only set to true when you need to verify the exact data that was written
4. **Validate data before sending**: Ensure your data matches Firestore's native JSON format
5. **Handle timestamps properly**: Use RFC3339 format for timestamp strings
6. **Base64 encode binary data**: Binary data must be base64 encoded in the `bytesValue` field
7. **Consider security rules**: Ensure your Firestore security rules allow document creation in the target collection
## Related Tools
- [`firestore-get-documents`](firestore-get-documents.md) - Retrieve documents by their paths
- [`firestore-query-collection`](firestore-query-collection.md) - Query documents in a collection
- [`firestore-delete-documents`](firestore-delete-documents.md) - Delete documents from Firestore
Some files were not shown because too many files have changed in this diff.