Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-11 16:38:15 -05:00)

Compare commits: sig...admin-endp (1 commit, 9bc91a6cd8)
@@ -20,9 +20,9 @@ DESCRIPTIONS=(
)

# Write the table header
ROW_FMT="| %-105s | %-120s | %-67s | %-108s |\n"
output_string+=$(printf "$ROW_FMT" "**OS/Architecture**" "**Description**" "**SHA256 Hash**" "**Signature**")$'\n'
output_string+=$(printf "$ROW_FMT" "$(printf -- '-%0.s' {1..105})" "$(printf -- '-%0.s' {1..120})" "$(printf -- '-%0.s' {1..67})" "$(printf -- '-%0.s' {1..67})")$'\n'
ROW_FMT="| %-105s | %-120s | %-67s |\n"
output_string+=$(printf "$ROW_FMT" "**OS/Architecture**" "**Description**" "**SHA256 Hash**")$'\n'
output_string+=$(printf "$ROW_FMT" "$(printf -- '-%0.s' {1..105})" "$(printf -- '-%0.s' {1..120})" "$(printf -- '-%0.s' {1..67})")$'\n'

# Loop through all files matching the pattern "toolbox.*.*"

@@ -43,19 +43,16 @@ do
URL="https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox"
fi

# Generate the signature URL & link
SIG_URL="${URL}.sig"
SIG_LINK="[.sig]($SIG_URL)"

curl "$URL" --fail --output toolbox || exit 1

# Calculate the SHA256 checksum of the file
SHA256=$(shasum -a 256 toolbox | awk '{print $1}')

# Write the table row
output_string+=$(printf "$ROW_FMT" "[$OS/$ARCH]($URL)" "$description_text" "$SHA256" "$SIG_LINK")$'\n'
output_string+=$(printf "$ROW_FMT" "[$OS/$ARCH]($URL)" "$description_text" "$SHA256")$'\n'

rm toolbox
done

printf "$output_string\n"
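The two hunks above appear to replace the four-column download table (which carried a Signature column and a `.sig` link) with a three-column table that keeps only the SHA256 hash. For readers who want to repeat the checksum step by hand, here is a minimal Python sketch; the URL layout follows the script above, while the version, OS, architecture, and expected-hash values are placeholders to be taken from the published release table.

```python
import hashlib
import urllib.request

# Placeholders for illustration; take these from the published release table.
VERSION, OS, ARCH = "v0.13.0", "linux", "amd64"
EXPECTED_SHA256 = "<hash listed in the release table>"

# Same object the script fetches with curl before hashing it.
url = f"https://storage.googleapis.com/genai-toolbox/{VERSION}/{OS}/{ARCH}/toolbox"
with urllib.request.urlopen(url) as resp:
    data = resp.read()

# Equivalent of `shasum -a 256 toolbox | awk '{print $1}'`.
digest = hashlib.sha256(data).hexdigest()
print(digest)
if digest != EXPECTED_SHA256:
    raise SystemExit("checksum mismatch: do not run this binary")
```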
@@ -62,28 +62,6 @@ steps:
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
- id: "alloydb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "ALLOYDB_PROJECT=$PROJECT_ID"
|
||||
- "ALLOYDB_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
|
||||
- "ALLOYDB_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
|
||||
- "ALLOYDB_REGION=$_REGION"
|
||||
secretEnv: ["ALLOYDB_POSTGRES_USER"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"AlloyDB" \
|
||||
alloydb \
|
||||
alloydb
|
||||
|
||||
- id: "alloydb-pg"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -174,25 +152,25 @@ steps:
|
||||
bigquery \
|
||||
bigquery
|
||||
|
||||
- id: "dataplex"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "DATAPLEX_PROJECT=$PROJECT_ID"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Dataplex" \
|
||||
dataplex \
|
||||
dataplex
|
||||
# - id: "dataplex"
|
||||
# name: golang:1
|
||||
# waitFor: ["compile-test-binary"]
|
||||
# entrypoint: /bin/bash
|
||||
# env:
|
||||
# - "GOPATH=/gopath"
|
||||
# - "DATAPLEX_PROJECT=$PROJECT_ID"
|
||||
# - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
# secretEnv: ["CLIENT_ID"]
|
||||
# volumes:
|
||||
# - name: "go"
|
||||
# path: "/gopath"
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# .ci/test_with_coverage.sh \
|
||||
# "Dataplex" \
|
||||
# dataplex \
|
||||
# dataplex
|
||||
|
||||
- id: "postgres"
|
||||
name: golang:1
|
||||
@@ -350,23 +328,23 @@ steps:
|
||||
mssql \
|
||||
mssql
|
||||
|
||||
# - id: "dgraph"
|
||||
# name: golang:1
|
||||
# waitFor: ["compile-test-binary"]
|
||||
# entrypoint: /bin/bash
|
||||
# env:
|
||||
# - "GOPATH=/gopath"
|
||||
# - "DGRAPH_URL=$_DGRAPHURL"
|
||||
# volumes:
|
||||
# - name: "go"
|
||||
# path: "/gopath"
|
||||
# args:
|
||||
# - -c
|
||||
# - |
|
||||
# .ci/test_with_coverage.sh \
|
||||
# "Dgraph" \
|
||||
# dgraph \
|
||||
# dgraph
|
||||
- id: "dgraph"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "DGRAPH_URL=$_DGRAPHURL"
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Dgraph" \
|
||||
dgraph \
|
||||
dgraph
|
||||
|
||||
- id: "http"
|
||||
name: golang:1
|
||||
@@ -486,7 +464,7 @@ steps:
|
||||
"OceanBase" \
|
||||
oceanbase \
|
||||
oceanbase
|
||||
|
||||
|
||||
- id: "firestore"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -553,24 +531,6 @@ steps:
|
||||
utility \
|
||||
utility/alloydbwaitforoperation
|
||||
|
||||
- id: "cloud-sql"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL Wait for Operation" \
|
||||
cloudsql \
|
||||
cloudsql
|
||||
|
||||
- id: "tidb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -660,25 +620,6 @@ steps:
|
||||
trino \
|
||||
trinosql trinoexecutesql
|
||||
|
||||
- id: "yugabytedb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
|
||||
- "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
|
||||
- "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./yugabytedb.test -test.v
|
||||
|
||||
availableSecrets:
|
||||
secretManager:
|
||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
||||
@@ -757,13 +698,6 @@ availableSecrets:
|
||||
env: OCEANBASE_USER
|
||||
- versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
|
||||
env: OCEANBASE_PASSWORD
|
||||
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
|
||||
env: YUGABYTEDB_HOST
|
||||
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
|
||||
env: YUGABYTEDB_USER
|
||||
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
|
||||
env: YUGABYTEDB_PASS
|
||||
|
||||
|
||||
options:
|
||||
logging: CLOUD_LOGGING_ONLY
|
||||
@@ -810,6 +744,3 @@ substitutions:
|
||||
_TRINO_SCHEMA: "default"
|
||||
_OCEANBASE_PORT: "2883"
|
||||
_OCEANBASE_DATABASE: "oceanbase"
|
||||
_YUGABYTEDB_DATABASE: "yugabyte"
|
||||
_YUGABYTEDB_PORT: "5433"
|
||||
_YUGABYTEDB_LOADBALANCE: "false"
|
||||
|
||||
@@ -17,7 +17,6 @@ steps:
|
||||
waitFor: ['-']
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
docker buildx create --name container-builder --driver docker-container --bootstrap --use
|
||||
|
||||
@@ -27,41 +26,6 @@ steps:
|
||||
fi
|
||||
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
|
||||
|
||||
- id: "generate-token"
|
||||
name: "gcr.io/cloud-builders/gcloud"
|
||||
waitFor: ['-']
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
gcloud auth print-identity-token --audiences=sigstore > /workspace/token
|
||||
|
||||
- id: "get-docker-digest"
|
||||
name: "gcr.io/cloud-builders/gcloud"
|
||||
waitFor:
|
||||
- "build-docker"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
IMAGE_DIGEST=$(\
|
||||
gcloud container images describe ${_DOCKER_URI}:$VERSION \
|
||||
--format='get(image_summary.fully_qualified_digest)'\
|
||||
)
|
||||
echo $IMAGE_DIGEST > /workspace/image_digest
|
||||
|
||||
- id: "sign-docker"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "get-docker-digest"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
IMAGE_DIGEST=$(cat /workspace/image_digest)
|
||||
cosign sign --identity-token=$(cat /workspace/token) $IMAGE_DIGEST -y
|
||||
|
||||
- id: "install-dependencies"
|
||||
name: golang:1
|
||||
waitFor: ['-']
|
||||
@@ -88,31 +52,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
|
||||
|
||||
- id: "sign-linux-amd64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-linux-amd64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.linux.amd64.sig ./toolbox.linux.amd64 -y
|
||||
|
||||
- id: "store-linux-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-linux-amd64"
|
||||
- "build-linux-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.linux.amd64
|
||||
gs://$_BUCKET_NAME/test/$VERSION/linux/amd64/toolbox
|
||||
|
||||
gcloud storage cp toolbox.linux.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/linux/amd64/toolbox.sig
|
||||
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox
|
||||
|
||||
- id: "build-darwin-arm64"
|
||||
name: golang:1
|
||||
@@ -129,30 +76,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
|
||||
|
||||
- id: "sign-darwin-arm64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-darwin-arm64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.darwin.arm64.sig ./toolbox.darwin.arm64 -y
|
||||
|
||||
- id: "store-darwin-arm64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-darwin-arm64"
|
||||
- "build-darwin-arm64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/test/$VERSION/darwin/arm64/toolbox
|
||||
|
||||
gcloud storage cp toolbox.darwin.arm64.sig gs://$_BUCKET_NAME/test/$VERSION/darwin/arm64/toolbox.sig
|
||||
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
|
||||
|
||||
- id: "build-darwin-amd64"
|
||||
name: golang:1
|
||||
@@ -169,30 +100,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "sign-darwin-amd64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-darwin-amd64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.darwin.amd64.sig ./toolbox.darwin.amd64 -y
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-darwin-amd64"
|
||||
- "build-darwin-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/test/$VERSION/darwin/amd64/toolbox
|
||||
|
||||
gcloud storage cp toolbox.darwin.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/darwin/amd64/toolbox.sig
|
||||
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
|
||||
|
||||
- id: "build-windows-amd64"
|
||||
name: golang:1
|
||||
@@ -209,30 +124,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "sign-windows-amd64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-windows-amd64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.windows.amd64.sig ./toolbox.windows.amd64 -y
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-windows-amd64"
|
||||
- "build-windows-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/test/$VERSION/windows/amd64/toolbox.exe
|
||||
|
||||
gcloud storage cp toolbox.windows.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/windows/amd64/toolbox.exe.sig
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
|
||||
|
||||
options:
|
||||
automapSubstitutions: true
|
||||
@@ -245,5 +144,5 @@ substitutions:
|
||||
_AR_HOSTNAME: ${_REGION}-docker.pkg.dev
|
||||
_AR_REPO_NAME: toolbox
|
||||
_BUCKET_NAME: genai-toolbox
|
||||
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/test
|
||||
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
|
||||
_PUSH_LATEST: "true"
|
||||
|
||||
.github/PULL_REQUEST_TEMPLATE.md (vendored, 7 changes)

@@ -1,19 +1,16 @@
## Description

---

> Should include a concise description of the changes (bug or feature), it's
> impact, along with a summary of the solution

## PR Checklist

---

> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:

- [ ] Make sure you reviewed
[CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
[bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
[bug/issue](https://github.com/googleapis/langchain-google-alloydb-pg-python/issues/new/choose)
before writing your code! That way we can discuss the change, evaluate
designs, and agree on the general idea
- [ ] Ensure the tests and linter pass

@@ -21,4 +18,4 @@
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involve a breaking change

🛠️ Fixes #<issue_number_goes_here>
🛠️ Fixes #<issue_number_goes_here>
.github/blunderbuss.yml (vendored, 17 changes)

@@ -1,24 +1,7 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

assign_issues:
  - Yuan325
  - duwenxin99
  - averikitsch
  - anubhav756
  - dishaprakash
  - twishabansal
assign_issues_by:
  - labels:
      - 'product: bigquery'
@@ -20,5 +20,3 @@ sourceFileExtensions:
  - 'go'
  - 'yaml'
  - 'yml'
ignoreFiles:
  - 'docs/en/getting-started/quickstart/**'
.github/labels.yaml (vendored, 4 changes)

@@ -88,10 +88,6 @@
  color: 8befd7
  description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'

- name: 'release candidate'
  color: 32CD32
  description: 'Use label to signal PR should be included in the next release.'

# Product Labels
- name: 'product: bigquery'
  color: 5065c7
.github/release-please.yml (vendored, 19 changes)

@@ -20,21 +20,26 @@ extraFiles: [
  "README.md",
  "docs/en/getting-started/colab_quickstart.ipynb",
  "docs/en/getting-started/introduction/_index.md",
  "docs/en/getting-started/local_quickstart.md",
  "docs/en/getting-started/local_quickstart_js.md",
  "docs/en/getting-started/local_quickstart_go.md",
  "docs/en/getting-started/mcp_quickstart/_index.md",
  "docs/en/getting-started/quickstart/shared/configure_toolbox.md",
  "docs/en/samples/alloydb/_index.md",
  "docs/en/samples/alloydb/mcp_quickstart.md",
  "docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb",
  "docs/en/samples/bigquery/local_quickstart.md",
  "docs/en/samples/bigquery/mcp_quickstart/_index.md",
  "docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
  "docs/en/samples/looker/looker_gemini.md",
  "docs/en/samples/looker/looker_gemini_oauth/_index.md",
  "docs/en/samples/looker/looker_mcp_inspector/_index.md",
  "docs/en/samples/looker/looker_mcp_inspector.md",
  "docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
  "docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
  "docs/en/how-to/connect-ide/bigquery_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
  "docs/en/how-to/connect-ide/firestore_mcp.md",
  "docs/en/how-to/connect-ide/looker_mcp.md",
  "docs/en/how-to/connect-ide/mysql_mcp.md",
  "docs/en/how-to/connect-ide/mssql_mcp.md",
  "docs/en/how-to/connect-ide/postgres_mcp.md",
  "docs/en/how-to/connect-ide/neo4j_mcp.md",
  "docs/en/how-to/connect-ide/sqlite_mcp.md",
  "docs/en/how-to/connect-ide/spanner_mcp.md",
]
@@ -37,7 +37,7 @@ jobs:
    runs-on: 'ubuntu-latest'

    steps:
      - uses: 'actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b' # v7
      - uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' # v7
        with:
          script: |-
            // parse test names
.github/workflows/docs_preview_clean.yaml (vendored, 2 changes)

@@ -48,7 +48,7 @@ jobs:
          git push

      - name: Comment
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
.github/workflows/docs_preview_deploy.yaml (vendored, 2 changes)

@@ -90,7 +90,7 @@ jobs:
          commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"

      - name: Comment
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
.github/workflows/lint.yaml (vendored, 2 changes)

@@ -36,7 +36,7 @@ jobs:
    steps:
      - name: Remove PR Label
        if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
.github/workflows/tests.yaml (vendored, 2 changes)

@@ -41,7 +41,7 @@ jobs:
    steps:
      - name: Remove PR label
        if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
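Each workflow hunk above swaps one pinned `actions/github-script` commit for the other (the pins commented `# v7.1.0` and `# v7.0.1`). If you want to confirm which release a pinned SHA actually corresponds to, the GitHub API can resolve a tag to its commit. A small Python sketch follows; it is not part of this repository, and it assumes unauthenticated API access is sufficient for the rate limits you are under.

```python
import json
import urllib.request

# Values taken from the hunks above; adjust to whichever pin you are checking.
PINNED_SHA = "60a0d83039c74a4aee543508d2ffcb1c3799cdea"
TAG = "v7.0.1"

# GET /repos/{owner}/{repo}/commits/{ref} resolves a tag name to its commit.
req = urllib.request.Request(
    f"https://api.github.com/repos/actions/github-script/commits/{TAG}",
    headers={"Accept": "application/vnd.github+json"},
)
with urllib.request.urlopen(req) as resp:
    commit = json.load(resp)

print(commit["sha"])
print("pin matches tag" if commit["sha"] == PINNED_SHA else "pin does not match tag")
```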
28
CHANGELOG.md
28
CHANGELOG.md
@@ -1,33 +1,5 @@
|
||||
# Changelog
|
||||
|
||||
## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **bigquery:** Move `useClientOAuth` config from tool to source ([#1279](https://github.com/googleapis/genai-toolbox/issues/1279)) ([8d20a48](https://github.com/googleapis/genai-toolbox/commit/8d20a48f13bcda853d41bdf80a162de12b076d1b))
|
||||
* **tools/bigquerysql:** remove `useClientOAuth` from tools config ([#1312](https://github.com/googleapis/genai-toolbox/issues/1312))
|
||||
|
||||
### Features
|
||||
|
||||
* **clickhouse:** Add ClickHouse Source and Tools ([#1088](https://github.com/googleapis/genai-toolbox/issues/1088)) ([75a04a5](https://github.com/googleapis/genai-toolbox/commit/75a04a55dd2259bed72fe95119a7a51a906c0b21))
|
||||
* **prebuilt/alloydb-postgres:** Support ipType and IAM users ([#1324](https://github.com/googleapis/genai-toolbox/issues/1324)) ([0b2121e](https://github.com/googleapis/genai-toolbox/commit/0b2121ea72eb81348dcd9c740a62ccd32e71fe37))
|
||||
* **server/mcp:** Support toolbox auth in mcp ([#1140](https://github.com/googleapis/genai-toolbox/issues/1140)) ([ca353e0](https://github.com/googleapis/genai-toolbox/commit/ca353e0b66fedc00e9110df57db18632aef49018))
|
||||
* **source/mysql:** Support `queryParams` in MySQL source ([#1299](https://github.com/googleapis/genai-toolbox/issues/1299)) ([3ae2526](https://github.com/googleapis/genai-toolbox/commit/3ae2526e0fe36b57b05a9b54f1d99f3fc68d9657))
|
||||
* **tools/bigquery:** Support end-user credential passthrough on multiple BQ tools ([#1314](https://github.com/googleapis/genai-toolbox/issues/1314)) ([88f4b30](https://github.com/googleapis/genai-toolbox/commit/88f4b3028df3b6a400936cdf8a035bf55021924c))
|
||||
* **tools/looker:** Add description for looker-get-models tool ([#1266](https://github.com/googleapis/genai-toolbox/issues/1266)) ([89af3a4](https://github.com/googleapis/genai-toolbox/commit/89af3a4ca332f029615b2a739d1f6cd50519638d))
|
||||
* **tools/looker:** Authenticate via end user credentials ([#1257](https://github.com/googleapis/genai-toolbox/issues/1257)) ([8755e3d](https://github.com/googleapis/genai-toolbox/commit/8755e3db3476abb35629b3cca9c78db7366757a4))
|
||||
* **tools/looker:** Report field suggestions to agent ([#1267](https://github.com/googleapis/genai-toolbox/issues/1267)) ([2cad82e](https://github.com/googleapis/genai-toolbox/commit/2cad82e5107566dd6c9b75e34e9976af63af0bb5))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Do not print usage on runtime error ([#1315](https://github.com/googleapis/genai-toolbox/issues/1315)) ([afba7a5](https://github.com/googleapis/genai-toolbox/commit/afba7a57cdd4fe7c1b0741dbf8f8c78b14a68089))
|
||||
* Update env var to allow empty string ([#1260](https://github.com/googleapis/genai-toolbox/issues/1260)) ([03aa9fa](https://github.com/googleapis/genai-toolbox/commit/03aa9fabacda06f860c9f178485126bddb7d5782))
|
||||
* **tools/firestore:** Add document/collection path validation ([#1229](https://github.com/googleapis/genai-toolbox/issues/1229)) ([14c2249](https://github.com/googleapis/genai-toolbox/commit/14c224939a2f9bb349fa00a7d5227877198530c2))
|
||||
* **tools/looker-get-dashboards:** Fix Looker client OAuth check ([#1338](https://github.com/googleapis/genai-toolbox/issues/1338)) ([36225aa](https://github.com/googleapis/genai-toolbox/commit/36225aa6db7f8426ad87930866530fde4e9bf0cd))
|
||||
* **tools/oceanbase:** Fix encoded text with mysql driver ([#1283](https://github.com/googleapis/genai-toolbox/issues/1283)) ([d16f89f](https://github.com/googleapis/genai-toolbox/commit/d16f89fbb6e49c03998f114ef7dc2b584b5e4967)), closes [#1161](https://github.com/googleapis/genai-toolbox/issues/1161)
|
||||
|
||||
## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
@@ -25,42 +25,33 @@ This project follows

## Contribution process

> [!NOTE]
> New contributions should always include both unit and integration tests.

### Code reviews

All submissions, including submissions by project members, require review. We
use GitHub pull requests for this purpose. Consult
[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
information on using pull requests.

### Code reviews

* Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes.
* When requesting changes, reviewers should self-assign the PR to ensure
Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes. When requesting changes, reviewers should self-assign the PR to ensure
they are aware of any updates.
* If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed.
* Commits will be
If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed. Commits will be
squashed when merged.
* Please follow up with changes promptly.
* If a PR is awaiting changes by the
Please follow up with changes promptly. If a PR is awaiting changes by the
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
are inactive for more than 30 days may be closed.

## Adding a New Database Source or Tool
### Adding a New Database Source and Tool

Please create an
We recommend creating an
[issue](https://github.com/googleapis/genai-toolbox/issues) before
implementation to ensure we can accept the contribution and no duplicated work. This issue
should include an overview of the API design. If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team.
implementation to ensure we can accept the contribution and no duplicated work.
If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team. New
contributions should be added with both unit tests and integration tests.

> [!NOTE]
> New tools can be added for [pre-existing data sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources). However, any new database source should also include at least one new tool type.

### Adding a New Database Source
#### 1. Implement the New Data Source

We recommend looking at an [example source
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).

@@ -87,10 +78,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.

### Adding a New Tool

> [!NOTE]
> Please follow the tool naming convention detailed [here](./DEVELOPER.md#tool-naming-conventions).
#### 2. Implement the New Tool

We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

@@ -123,7 +111,7 @@ tools.
* **Implement `init()`** to register the new Tool.
* **Implement Unit Tests** in a file named `newdb_test.go`.

### Adding Integration Tests
#### 3. Add Integration Tests

* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the

@@ -165,7 +153,7 @@ tools.
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters

### Adding Documentation
#### 4. Add Documentation

* **Update the documentation** to include information about your new data source
and tool. This includes:

@@ -175,7 +163,7 @@ tools.

* **(Optional) Add samples** to the `docs/en/samples/<newdb>` directory.

### (Optional) Adding Prebuilt Tools
#### (Optional) 5. Add Prebuilt Tools

You can provide developers with a set of "build-time" tools to aid common
software development user journeys like viewing and creating tables/collections

@@ -189,7 +177,7 @@ and data.
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
and [cmd/root_test.go](cmd/root_test.go).

## Submitting a Pull Request
#### 6. Submit a Pull Request

Submit a pull request to the repository with your changes. Be sure to include a
detailed description of your changes and any requests for long term testing

@@ -230,4 +218,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
include a concise description of the changes, it's impact, along with a
summary of the solution. If the PR is related to a specific issue, the issue
number should be mentioned in the PR description (e.g. `Fixes #1`).
number should be mentioned in the PR description (e.g. `Fixes #1`).
DEVELOPER.md (41 changes)

@@ -44,47 +44,6 @@ Before you begin, ensure you have the following:
   curl http://127.0.0.1:5000
   ```

### Tool Naming Conventions

This section details the purpose and conventions for MCP Toolbox's tools naming
properties, **tool name** and **tool kind**.

```
cancel_hotel:            <- tool name
    kind: postgres-sql   <- tool kind
    source: my_pg_source
```

#### Tool Name

Tool name is the identifier used by a Large Language Model (LLM) to invoke a
specific tool.

* Custom tools: The user can define any name they want. The below guidelines
  do not apply.
* Pre-built tools: The tool name is predefined and cannot be changed. It
  should follow the guidelines.

The following guidelines apply to tool names:

* Should use underscores over hyphens (e.g., `list_collections` instead of
  `list-collections`).
* Should not have the product name in the name (e.g., `list_collections` instead
  of `firestore_list_collections`).
* Superficial changes are NOT considered as breaking (e.g., changing tool name).
* Non-superficial changes MAY be considered breaking (e.g. adding new parameters
  to a function) until they can be validated through extensive testing to ensure
  they do not negatively impact agent's performances.

#### Tool Kind

Tool kind serves as a category or type that a user can assign to a tool.

The following guidelines apply to tool kinds:

* Should use hyphens over underscores (e.g. `firestore-list-collections` over
  `firestore_list_collections`).
* Should use product name in name (e.g. `firestore-list-collections` over
  `list-collections`).
* Changes to tool kind are breaking changes and should be avoided.
## Testing

### Infrastructure
README.md (61 changes)

@@ -117,7 +117,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```

@@ -130,7 +130,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -154,7 +154,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```
<!-- {x-release-please-end} -->

@@ -165,66 +165,22 @@ go install github.com/googleapis/genai-toolbox@v0.14.0
[Configure](#configuration) a `tools.yaml` to define your tools, and then
execute `toolbox` to start the server:

<details open>
<summary>Binary</summary>

To run Toolbox from binary:

```sh
./toolbox --tools-file "tools.yaml"
```

ⓘ **NOTE:**
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
> [!NOTE]
> Toolbox enables dynamic reloading by default. To disable, use the
> `--disable-reload` flag.

</details>
#### Homebrew Users

<details>

<summary>Container image</summary>

To run the server after pulling the [container image](#installing-the-server):

```sh
export VERSION=0.11.0 # Use the version you pulled
docker run -p 5000:5000 \
  -v $(pwd)/tools.yaml:/app/tools.yaml \
  us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
  --tools-file "/app/tools.yaml"
```

ⓘ **NOTE:**
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps the container's port `5000` to your host's port `5000`.

</details>

<details>

<summary>Source</summary>

To run the server directly from source, navigate to the project root directory and run:

```sh
go run .
```

ⓘ **NOTE:**
This command runs the project from source, and is more suitable for development and testing. It does **not** compile a binary into your `$GOPATH`. If you want to compile a binary instead, refer the [Developer Documentation](./DEVELOPER.md#building-the-binary).

</details>

<details>

<summary>Homebrew</summary>

If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox` binary is available in your system path. You can start the server with the same command:
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:

```sh
toolbox --tools-file "tools.yaml"
```

</details>

You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).

@@ -232,7 +188,6 @@ For more detailed documentation on deploying to different environments, check
out the resources in the [How-to
section](https://googleapis.github.io/genai-toolbox/how-to/)

### Integrating your application

Once your server is up and running, you can load the tools into your
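The README hunk above is cut off right where the "Integrating your application" section begins. As an illustration of that step, here is a minimal Python sketch using the `toolbox_core` client that also appears in the quickstart diffs further down this page; the server address matches the quickstart, and `my-toolset` is assumed to be a toolset you defined in `tools.yaml`.

```python
from toolbox_core import ToolboxSyncClient

# Connect to a locally running Toolbox server
# (started with `./toolbox --tools-file "tools.yaml"`).
with ToolboxSyncClient("http://127.0.0.1:5000") as client:
    # "my-toolset" is assumed to be a toolset defined in tools.yaml.
    tools = client.load_toolset("my-toolset")
    print(f"loaded {len(tools)} tools from the server")
```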
26
cmd/root.go
26
cmd/root.go
@@ -42,12 +42,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
|
||||
@@ -58,13 +53,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
|
||||
@@ -77,7 +66,6 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||
@@ -109,7 +97,6 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
|
||||
@@ -117,14 +104,11 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
|
||||
@@ -132,17 +116,13 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
@@ -165,7 +145,6 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -240,9 +219,6 @@ func NewCommand(opts ...Option) *Command {
|
||||
o(cmd)
|
||||
}
|
||||
|
||||
// Do not print Usage on runtime error
|
||||
cmd.SilenceUsage = true
|
||||
|
||||
// Set server version
|
||||
cmd.cfg.Version = versionString
|
||||
|
||||
|
||||
@@ -1244,8 +1244,6 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
postgresconfig, _ := prebuiltconfigs.Get("postgres")
|
||||
spanner_config, _ := prebuiltconfigs.Get("spanner")
|
||||
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
|
||||
sqlite_config, _ := prebuiltconfigs.Get("sqlite")
|
||||
neo4jconfig, _ := prebuiltconfigs.Get("neo4j")
|
||||
|
||||
// Set environment variables
|
||||
t.Setenv("API_KEY", "your_api_key")
|
||||
@@ -1320,13 +1318,6 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
|
||||
t.Setenv("LOOKER_VERIFY_SSL", "true")
|
||||
|
||||
t.Setenv("SQLITE_DATABASE", "test.db")
|
||||
|
||||
t.Setenv("NEO4J_URI", "bolt://localhost:7687")
|
||||
t.Setenv("NEO4J_DATABASE", "neo4j")
|
||||
t.Setenv("NEO4J_USERNAME", "your_neo4j_user")
|
||||
t.Setenv("NEO4J_PASSWORD", "your_neo4j_password")
|
||||
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
@@ -1340,9 +1331,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "alloydb postgres admin prebuilt tools",
|
||||
in: alloydb_admin_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_admin_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_admin_tools",
|
||||
ToolNames: []string{"create_cluster", "wait_for_operation", "create_instance", "list_clusters", "list_instances", "list_users", "create_user", "get_cluster"},
|
||||
"alloydb-postgres-admin-tools": tools.ToolsetConfig{
|
||||
Name: "alloydb-postgres-admin-tools",
|
||||
ToolNames: []string{"alloydb-create-cluster", "alloydb-operations-get", "alloydb-create-instance", "alloydb-list-clusters", "alloydb-list-instances", "alloydb-list-users", "alloydb-create-user"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1350,8 +1341,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "alloydb prebuilt tools",
|
||||
in: alloydb_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_database_tools",
|
||||
"alloydb-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "alloydb-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1360,9 +1351,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "bigquery prebuilt tools",
|
||||
in: bigquery_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"bigquery_database_tools": tools.ToolsetConfig{
|
||||
Name: "bigquery_database_tools",
|
||||
ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
|
||||
"bigquery-database-tools": tools.ToolsetConfig{
|
||||
Name: "bigquery-database-tools",
|
||||
ToolNames: []string{"ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1370,9 +1361,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "clickhouse prebuilt tools",
|
||||
in: clickhouse_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"clickhouse_database_tools": tools.ToolsetConfig{
|
||||
Name: "clickhouse_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_databases"},
|
||||
"clickhouse-database-tools": tools.ToolsetConfig{
|
||||
Name: "clickhouse-database-tools",
|
||||
ToolNames: []string{"execute_sql"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1380,8 +1371,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlpg prebuilt tools",
|
||||
in: cloudsqlpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_database_tools",
|
||||
"cloud-sql-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1390,8 +1381,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlmysql prebuilt tools",
|
||||
in: cloudsqlmysql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mysql_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mysql_database_tools",
|
||||
"cloud-sql-mysql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mysql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1400,8 +1391,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlmssql prebuilt tools",
|
||||
in: cloudsqlmssql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mssql_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mssql_database_tools",
|
||||
"cloud-sql-mssql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mssql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1410,9 +1401,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "dataplex prebuilt tools",
|
||||
in: dataplex_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"dataplex_tools": tools.ToolsetConfig{
|
||||
Name: "dataplex_tools",
|
||||
ToolNames: []string{"search_entries", "lookup_entry", "search_aspect_types"},
|
||||
"dataplex-tools": tools.ToolsetConfig{
|
||||
Name: "dataplex-tools",
|
||||
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry", "dataplex_search_aspect_types"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1420,9 +1411,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "firestore prebuilt tools",
|
||||
in: firestoreconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"firestore_database_tools": tools.ToolsetConfig{
|
||||
Name: "firestore_database_tools",
|
||||
ToolNames: []string{"get_documents", "add_documents", "update_document", "list_collections", "delete_documents", "query_collection", "get_rules", "validate_rules"},
|
||||
"firestore-database-tools": tools.ToolsetConfig{
|
||||
Name: "firestore-database-tools",
|
||||
ToolNames: []string{"firestore-get-documents", "firestore-add-documents", "firestore-update-document", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1430,8 +1421,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "mysql prebuilt tools",
|
||||
in: mysql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"mysql_database_tools": tools.ToolsetConfig{
|
||||
Name: "mysql_database_tools",
|
||||
"mysql-database-tools": tools.ToolsetConfig{
|
||||
Name: "mysql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1440,8 +1431,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "mssql prebuilt tools",
|
||||
in: mssql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"mssql_database_tools": tools.ToolsetConfig{
|
||||
Name: "mssql_database_tools",
|
||||
"mssql-database-tools": tools.ToolsetConfig{
|
||||
Name: "mssql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1450,8 +1441,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "looker prebuilt tools",
|
||||
in: looker_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"looker_tools": tools.ToolsetConfig{
|
||||
Name: "looker_tools",
|
||||
"looker-tools": tools.ToolsetConfig{
|
||||
Name: "looker-tools",
|
||||
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element"},
|
||||
},
|
||||
},
|
||||
@@ -1460,8 +1451,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "postgres prebuilt tools",
|
||||
in: postgresconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "postgres_database_tools",
|
||||
"postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1480,32 +1471,12 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "spanner pg prebuilt tools",
|
||||
in: spannerpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"spanner_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "spanner_postgres_database_tools",
|
||||
"spanner-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "spanner-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sqlite prebuilt tools",
|
||||
in: sqlite_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"sqlite_database_tools": tools.ToolsetConfig{
|
||||
Name: "sqlite_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "neo4j prebuilt tools",
|
||||
in: neo4jconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"neo4j_database_tools": tools.ToolsetConfig{
|
||||
Name: "neo4j_database_tools",
|
||||
ToolNames: []string{"execute_cypher", "get_schema"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
|
||||
@@ -1 +1 @@
0.14.0
0.13.0
@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.14.0\" # x-release-please-version\n",
"version = \"0.13.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",
@@ -86,7 +86,7 @@ To install Toolbox as a binary:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```

@@ -97,7 +97,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -115,7 +115,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```

{{% /tab %}}
@@ -17,11 +17,6 @@ This guide assumes you have already done the following:
   your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv]).
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].

[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/

### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
@@ -99,23 +94,305 @@ pip install google-genai
|
||||
code to create an agent:
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="ADK" lang="python" >}}
|
||||
from google.adk.agents import Agent
|
||||
from google.adk.runners import Runner
|
||||
from google.adk.sessions import InMemorySessionService
|
||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||
from google.genai import types
|
||||
from toolbox_core import ToolboxSyncClient
|
||||
|
||||
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
# TODO(developer): replace this with your Google API key
|
||||
|
||||
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
|
||||
|
||||
async def main():
|
||||
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
root_agent = Agent(
|
||||
model='gemini-2.0-flash-001',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = await session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
|
||||
for text in responses:
|
||||
print(text)
|
||||
|
||||
asyncio.run(main())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
{{< include "quickstart/python/langchain/quickstart.py" >}}
|
||||
from langgraph.prebuilt import create_react_agent
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from langchain_google_vertexai import ChatVertexAI
|
||||
|
||||
# from langchain_google_genai import ChatGoogleGenerativeAI
|
||||
|
||||
# from langchain_anthropic import ChatAnthropic
|
||||
|
||||
from langgraph.checkpoint.memory import MemorySaver
|
||||
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
|
||||
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
|
||||
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
|
||||
|
||||
config = {"configurable": {"thread_id": "thread-1"}}
|
||||
for query in queries:
|
||||
inputs = {"messages": [("user", prompt + query)]}
|
||||
response = agent.invoke(inputs, stream_mode="values", config=config)
|
||||
print(response["messages"][-1].content)
|
||||
|
||||
asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="python" >}}
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
{{< include "quickstart/python/llamaindex/quickstart.py" >}}
|
||||
from llama_index.core.agent.workflow import AgentWorkflow
|
||||
|
||||
from llama_index.core.workflow import Context
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from llama_index.llms.google_genai import GoogleGenAI
|
||||
|
||||
# from llama_index.llms.anthropic import Anthropic
|
||||
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
llm = GoogleGenAI(
|
||||
model="gemini-2.0-flash-001",
|
||||
vertexai_config={"project": "project-id", "location": "us-central1"},
|
||||
)
|
||||
# llm = GoogleGenAI(
|
||||
# api_key=os.getenv("GOOGLE_API_KEY"),
|
||||
# model="gemini-2.0-flash-001",
|
||||
# )
|
||||
# llm = Anthropic(
|
||||
# model="claude-3-7-sonnet-latest",
|
||||
# api_key=os.getenv("ANTHROPIC_API_KEY")
|
||||
# )
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = AgentWorkflow.from_tools_or_functions(
|
||||
tools,
|
||||
llm=llm,
|
||||
system_prompt=prompt,
|
||||
)
|
||||
ctx = Context(agent)
|
||||
for query in queries:
|
||||
response = await agent.run(user_msg=query, ctx=ctx)
|
||||
print(f"---- {query} ----")
|
||||
print(str(response))
|
||||
|
||||
asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
{{< include "quickstart/python/core/quickstart.py" >}}
|
||||
from google import genai
|
||||
from google.genai.types import (
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
Part,
|
||||
Tool,
|
||||
)
|
||||
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Please book the hotel Hilton Basel for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Hyatt Regency for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
|
||||
# integration. While this example uses Google's genai client, these callables can be adapted for
|
||||
# various function-calling or agent frameworks. For easier integration with supported frameworks
|
||||
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
|
||||
# provided wrapper packages, which handle framework-specific boilerplate.
|
||||
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
|
||||
genai_client = genai.Client(
|
||||
vertexai=True, project="project-id", location="us-central1"
|
||||
)
|
||||
|
||||
genai_tools = [
|
||||
Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration.from_callable_with_api_option(callable=tool)
|
||||
]
|
||||
)
|
||||
for tool in toolbox_tools
|
||||
]
|
||||
history = []
|
||||
for query in queries:
|
||||
user_prompt_content = Content(
|
||||
role="user",
|
||||
parts=[Part.from_text(text=query)],
|
||||
)
|
||||
history.append(user_prompt_content)
|
||||
|
||||
response = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
system_instruction=prompt,
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
history.append(response.candidates[0].content)
|
||||
function_response_parts = []
|
||||
for function_call in response.function_calls:
|
||||
fn_name = function_call.name
|
||||
# The tools are sorted alphabetically
|
||||
if fn_name == "search-hotels-by-name":
|
||||
function_result = await toolbox_tools[3](**function_call.args)
|
||||
elif fn_name == "search-hotels-by-location":
|
||||
function_result = await toolbox_tools[2](**function_call.args)
|
||||
elif fn_name == "book-hotel":
|
||||
function_result = await toolbox_tools[0](**function_call.args)
|
||||
elif fn_name == "update-hotel":
|
||||
function_result = await toolbox_tools[4](**function_call.args)
|
||||
elif fn_name == "cancel-hotel":
|
||||
function_result = await toolbox_tools[1](**function_call.args)
|
||||
else:
|
||||
raise ValueError("Function name not present.")
|
||||
function_response = {"result": function_result}
|
||||
function_response_part = Part.from_function_response(
|
||||
name=function_call.name,
|
||||
response=function_response,
|
||||
)
|
||||
function_response_parts.append(function_response_part)
|
||||
|
||||
if function_response_parts:
|
||||
tool_response_content = Content(role="tool", parts=function_response_parts)
|
||||
history.append(tool_response_content)
|
||||
|
||||
response2 = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
final_model_response_content = response2.candidates[0].content
|
||||
history.append(final_model_response_content)
|
||||
print(response2.text)
|
||||
|
||||
asyncio.run(run_application())
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -49,28 +49,614 @@ from Toolbox.
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/langchain/quickstart.go" >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
"github.com/tmc/langchaingo/llms/googleai"
|
||||
)
|
||||
|
||||
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
|
||||
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return llms.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
|
||||
// Convert into LangChain's llms.Tool
|
||||
return llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
genaiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
// Initialize the Google AI client (LLM).
|
||||
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google AI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
langchainTools := make([]llms.Tool, len(tools))
|
||||
// Convert the loaded ToolboxTools into the format LangChainGo requires.
|
||||
for i, tool := range tools {
|
||||
langchainTools[i] = ConvertToLangchainTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Start the conversation history.
|
||||
messageHistory := []llms.MessageContent{
|
||||
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
|
||||
|
||||
// Make the first call to the LLM, making it aware of the tool.
|
||||
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed: %v", err)
|
||||
}
|
||||
respChoice := resp.Choices[0]
|
||||
|
||||
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
assistantResponse.Parts = append(assistantResponse.Parts, tc)
|
||||
}
|
||||
messageHistory = append(messageHistory, assistantResponse)
|
||||
|
||||
// Process each tool call requested by the model.
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
toolName := tc.FunctionCall.Name
|
||||
tool := toolsMap[toolName]
|
||||
var args map[string]any
|
||||
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
|
||||
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
|
||||
}
|
||||
toolResult, err := tool.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
|
||||
}
|
||||
if toolResult == "" || toolResult == nil {
|
||||
toolResult = "Operation completed successfully with no specific return value."
|
||||
}
|
||||
|
||||
// Create the tool call response message and add it to the history.
|
||||
toolResponse := llms.MessageContent{
|
||||
Role: llms.ChatMessageTypeTool,
|
||||
Parts: []llms.ContentPart{
|
||||
llms.ToolCallResponse{
|
||||
Name: toolName,
|
||||
Content: fmt.Sprintf("%v", toolResult),
|
||||
},
|
||||
},
|
||||
}
|
||||
messageHistory = append(messageHistory, toolResponse)
|
||||
}
|
||||
finalResp, err := llm.GenerateContent(ctx, messageHistory)
|
||||
if err != nil {
|
||||
log.Fatalf("Final LLM call failed after tool execution: %v", err)
|
||||
}
|
||||
|
||||
// Add the final textual response from the LLM to the history
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
|
||||
|
||||
fmt.Println(finalResp.Choices[0].Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="Genkit Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/genkit/quickstart.go" >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/firebase/genkit/go/plugins/googlegenai"
|
||||
)
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create Toolbox Client
|
||||
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
// Initialize Genkit
|
||||
g, err := genkit.Init(ctx,
|
||||
genkit.WithPlugins(&googlegenai.GoogleAI{}),
|
||||
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to init genkit: %v\n", err)
|
||||
}
|
||||
|
||||
// Create a conversation history
|
||||
conversationHistory := []*ai.Message{
|
||||
ai.NewSystemTextMessage(systemPrompt),
|
||||
}
|
||||
|
||||
// Convert your tool to a Genkit tool.
|
||||
genkitTools := make([]ai.Tool, len(tools))
|
||||
for i, tool := range tools {
|
||||
newTool, err := tbgenkit.ToGenkitTool(tool, g)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to convert tool: %v\n", err)
|
||||
}
|
||||
genkitTools[i] = newTool
|
||||
}
|
||||
|
||||
toolRefs := make([]ai.ToolRef, len(genkitTools))
|
||||
|
||||
for i, tool := range genkitTools {
|
||||
toolRefs[i] = tool
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
|
||||
response, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(conversationHistory...),
|
||||
ai.WithTools(toolRefs...),
|
||||
ai.WithReturnToolRequests(true),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("%v\n", err)
|
||||
}
|
||||
conversationHistory = append(conversationHistory, response.Message)
|
||||
|
||||
parts := []*ai.Part{}
|
||||
|
||||
for _, req := range response.ToolRequests() {
|
||||
tool := genkit.LookupTool(g, req.Name)
|
||||
if tool == nil {
|
||||
log.Fatalf("tool %q not found", req.Name)
|
||||
}
|
||||
|
||||
output, err := tool.RunRaw(ctx, req.Input)
|
||||
if err != nil {
|
||||
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
|
||||
}
|
||||
|
||||
parts = append(parts,
|
||||
ai.NewToolResponsePart(&ai.ToolResponse{
|
||||
Name: req.Name,
|
||||
Ref: req.Ref,
|
||||
Output: output,
|
||||
}))
|
||||
|
||||
}
|
||||
|
||||
if len(parts) > 0 {
|
||||
resp, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
|
||||
ai.WithTools(toolRefs...),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println("\n", resp.Text())
|
||||
conversationHistory = append(conversationHistory, resp.Message)
|
||||
} else {
|
||||
fmt.Println("\n", response.Text())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="Go GenAI" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/genAI/quickstart.go" >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
|
||||
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
|
||||
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return &genai.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
// First, create the function declaration.
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
}
|
||||
|
||||
// Then, wrap the function declaration in a genai.Tool struct.
|
||||
return &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
|
||||
func printResponse(resp *genai.GenerateContentResponse) {
|
||||
for _, cand := range resp.Candidates {
|
||||
if cand.Content != nil {
|
||||
for _, part := range cand.Content.Parts {
|
||||
fmt.Println(part.Text)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
apiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
|
||||
// Initialize the Google GenAI client using the explicit ClientConfig.
|
||||
client, err := genai.NewClient(ctx, &genai.ClientConfig{
|
||||
APIKey: apiKey,
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google GenAI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
genAITools := make([]*genai.Tool, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
genAITools[i] = ConvertToGenaiTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Set up the generative model with the available tool.
|
||||
modelName := "gemini-2.0-flash"
|
||||
|
||||
// Create the initial content prompt for the model.
|
||||
messageHistory := []*genai.Content{
|
||||
genai.NewContentFromText(systemPrompt, genai.RoleUser),
|
||||
}
|
||||
config := &genai.GenerateContentConfig{
|
||||
Tools: genAITools,
|
||||
ToolConfig: &genai.ToolConfig{
|
||||
FunctionCallingConfig: &genai.FunctionCallingConfig{
|
||||
Mode: genai.FunctionCallingConfigModeAny,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
|
||||
|
||||
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed for query '%s': %v", query, err)
|
||||
}
|
||||
|
||||
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
|
||||
}
|
||||
|
||||
functionCalls := genContentResp.FunctionCalls()
|
||||
|
||||
toolResponseParts := []*genai.Part{}
|
||||
|
||||
for _, fc := range functionCalls {
|
||||
|
||||
toolToInvoke, found := toolsMap[fc.Name]
|
||||
if !found {
|
||||
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
|
||||
}
|
||||
|
||||
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
|
||||
if invokeErr != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
|
||||
}
|
||||
|
||||
// Enhanced Tool Result Handling (retained to prevent nil issues)
|
||||
toolResultString := ""
|
||||
if toolResult != nil {
|
||||
jsonBytes, marshalErr := json.Marshal(toolResult)
|
||||
if marshalErr == nil {
|
||||
toolResultString = string(jsonBytes)
|
||||
} else {
|
||||
toolResultString = fmt.Sprintf("%v", toolResult)
|
||||
}
|
||||
}
|
||||
|
||||
responseMap := map[string]any{"result": toolResultString}
|
||||
|
||||
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
|
||||
}
|
||||
// Add all accumulated tool responses for this turn to the message history.
|
||||
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
|
||||
messageHistory = append(messageHistory, toolResponseContent)
|
||||
|
||||
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
|
||||
if err != nil {
|
||||
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
|
||||
}
|
||||
|
||||
printResponse(finalResponse)
|
||||
// Add the final textual response from the LLM to the history
|
||||
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="OpenAI Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/openAI/quickstart.go" >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
"fmt
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
|
||||
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
|
||||
// Get the input schema
|
||||
jsonSchemaBytes, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Unmarshal the JSON bytes into FunctionParameters
|
||||
var paramsSchema openai.FunctionParameters
|
||||
if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Create and return the final tool parameter struct.
|
||||
return openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: openai.String(toolboxTool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
toolboxURL := "http://localhost:5000"
|
||||
openAIClient := openai.NewClient()
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
|
||||
openAITools[i] = ConvertToOpenAITool(tool)
|
||||
// Add tool to a map for lookup later
|
||||
toolsMap[tool.Name()] = tool
|
||||
|
||||
}
|
||||
|
||||
params := openai.ChatCompletionNewParams{
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{
|
||||
openai.SystemMessage(systemPrompt),
|
||||
},
|
||||
Tools: openAITools,
|
||||
Seed: openai.Int(0),
|
||||
Model: openai.ChatModelGPT4o,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
params.Messages = append(params.Messages, openai.UserMessage(query))
|
||||
|
||||
// Make initial chat completion request
|
||||
completion, err := openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
toolCalls := completion.Choices[0].Message.ToolCalls
|
||||
|
||||
// Log it if the model made no tool calls for this turn
|
||||
if len(toolCalls) == 0 {
|
||||
log.Println("No function call")
|
||||
}
|
||||
|
||||
// If there was a function call, continue the conversation
|
||||
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
|
||||
for _, toolCall := range toolCalls {
|
||||
|
||||
toolName := toolCall.Function.Name
|
||||
toolToInvoke := toolsMap[toolName]
|
||||
|
||||
var args map[string]any
|
||||
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
result, err := toolToInvoke.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatal("Could not invoke tool", err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
|
||||
}
|
||||
|
||||
completion, err = openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.AssistantMessage(completion.Choices[0].Message.Content))
|
||||
|
||||
fmt.println("\n", completion.Choices[0].Message.Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -64,26 +64,384 @@ npm install @google/genai
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/langchain/quickstart.js" >}}
|
||||
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { tool } from "@langchain/core/tools";
|
||||
import { createReactAgent } from "@langchain/langgraph/prebuilt";
|
||||
import { MemorySaver } from "@langchain/langgraph";
|
||||
|
||||
// Replace it with your API key
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key';
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function runApplication() {
|
||||
const model = new ChatGoogleGenerativeAI({
|
||||
model: "gemini-2.0-flash",
|
||||
});
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(toolboxTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
const tools = toolboxTools.map(getTool);
|
||||
|
||||
const agent = createReactAgent({
|
||||
llm: model,
|
||||
tools: tools,
|
||||
checkpointer: new MemorySaver(),
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
const langGraphConfig = {
|
||||
configurable: {
|
||||
thread_id: "test-thread",
|
||||
},
|
||||
};
|
||||
|
||||
for (const query of queries) {
|
||||
const agentOutput = await agent.invoke(
|
||||
{
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: query,
|
||||
},
|
||||
],
|
||||
verbose: true,
|
||||
},
|
||||
langGraphConfig
|
||||
);
|
||||
const response = agentOutput.messages[agentOutput.messages.length - 1].content;
|
||||
console.log(response);
|
||||
}
|
||||
}
|
||||
|
||||
runApplication()
|
||||
.catch(console.error)
|
||||
.finally(() => console.log("\nApplication finished."));
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GenkitJS" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/genkit/quickstart.js" >}}
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { genkit } from "genkit";
|
||||
import { googleAI } from '@genkit-ai/googleai';
|
||||
|
||||
// Replace it with your API key
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key';
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function run() {
|
||||
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
const toolMap = Object.fromEntries(
|
||||
toolboxTools.map((tool) => {
|
||||
const definedTool = ai.defineTool(
|
||||
{
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
inputSchema: tool.getParamSchema(),
|
||||
},
|
||||
tool
|
||||
);
|
||||
return [tool.getName(), definedTool];
|
||||
})
|
||||
);
|
||||
const tools = Object.values(toolMap);
|
||||
|
||||
let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];
|
||||
|
||||
for (const query of queries) {
|
||||
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
||||
let response = await ai.generate({
|
||||
messages: conversationHistory,
|
||||
tools: tools,
|
||||
});
|
||||
conversationHistory.push(response.message);
|
||||
|
||||
const toolRequests = response.toolRequests;
|
||||
if (toolRequests?.length > 0) {
|
||||
// Execute tools concurrently and collect their responses.
|
||||
const toolResponses = await Promise.all(
|
||||
toolRequests.map(async (call) => {
|
||||
try {
|
||||
const toolOutput = await toolMap[call.name].invoke(call.input);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool ${call.name}:`, e);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
conversationHistory.push(...toolResponses);
|
||||
|
||||
// Call the AI again with the tool results.
|
||||
response = await ai.generate({ messages: conversationHistory, tools });
|
||||
conversationHistory.push(response.message);
|
||||
}
|
||||
|
||||
console.log(response.text);
|
||||
}
|
||||
}
|
||||
|
||||
run();
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="LlamaIndex" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/llamaindex/quickstart.js" >}}
|
||||
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
|
||||
import { agent } from "@llamaindex/workflow";
|
||||
import { createMemory, staticBlock, tool } from "llamaindex";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
|
||||
When the user searches for a hotel, mention its name, id, location and price tier.
|
||||
Always mention hotel ids while performing any searches — this is very important for operations.
|
||||
For any bookings or cancellations, please provide the appropriate confirmation.
|
||||
Update check-in or check-out dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
// Connect to MCP Toolbox
|
||||
const client = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
const tools = toolboxTools.map((toolboxTool) => {
|
||||
return tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
execute: toolboxTool,
|
||||
});
|
||||
});
|
||||
|
||||
// Initialize LLM
|
||||
const llm = gemini({
|
||||
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
|
||||
apiKey: process.env.GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
const memory = createMemory({
|
||||
memoryBlocks: [
|
||||
staticBlock({
|
||||
content: prompt,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
// Create the Agent
|
||||
const myAgent = agent({
|
||||
tools: tools,
|
||||
llm,
|
||||
memory,
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
const result = await myAgent.run(query);
|
||||
const output = result.data.result;
|
||||
|
||||
console.log(`\nUser: ${query}`);
|
||||
if (typeof output === "string") {
|
||||
console.log(output.trim());
|
||||
} else if (typeof output === "object" && "text" in output) {
|
||||
console.log(output.text.trim());
|
||||
} else {
|
||||
console.log(JSON.stringify(output));
|
||||
}
|
||||
}
|
||||
// You may observe some extra logs during execution due to the run method provided by LlamaIndex.
|
||||
console.log("Agent run finished.");
|
||||
}
|
||||
|
||||
main();
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GoogleGenAI" lang="js" >}}
|
||||
import { GoogleGenAI } from "@google/genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
{{< include "quickstart/js/genAI/quickstart.js" >}}
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = 'enter your api here'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
function mapZodTypeToOpenAPIType(zodTypeName) {
|
||||
|
||||
console.log(zodTypeName)
|
||||
const typeMap = {
|
||||
'ZodString': 'string',
|
||||
'ZodNumber': 'number',
|
||||
'ZodBoolean': 'boolean',
|
||||
'ZodArray': 'array',
|
||||
'ZodObject': 'object',
|
||||
};
|
||||
return typeMap[zodTypeName] || 'string';
|
||||
}
|
||||
|
||||
async function runApplication() {
|
||||
|
||||
const toolboxClient = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
|
||||
const geminiTools = [{
|
||||
functionDeclarations: toolboxTools.map(tool => {
|
||||
|
||||
const schema = tool.getParamSchema();
|
||||
const properties = {};
|
||||
const required = [];
|
||||
|
||||
|
||||
for (const [key, param] of Object.entries(schema.shape)) {
|
||||
properties[key] = {
|
||||
type: mapZodTypeToOpenAPIType(param.constructor.name),
|
||||
description: param.description || '',
|
||||
};
|
||||
required.push(key)
|
||||
}
|
||||
|
||||
return {
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
parameters: { type: 'object', properties, required },
|
||||
};
|
||||
})
|
||||
}];
|
||||
|
||||
|
||||
const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });
|
||||
|
||||
const chat = genAI.chats.create({
|
||||
model: "gemini-2.5-flash",
|
||||
config: {
|
||||
systemInstruction: prompt,
|
||||
tools: geminiTools,
|
||||
}
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
|
||||
let currentResult = await chat.sendMessage({ message: query });
|
||||
|
||||
let finalResponseGiven = false
|
||||
while (!finalResponseGiven) {
|
||||
|
||||
const response = currentResult;
|
||||
const functionCalls = response.functionCalls || [];
|
||||
|
||||
if (functionCalls.length === 0) {
|
||||
console.log(response.text)
|
||||
finalResponseGiven = true;
|
||||
} else {
|
||||
const toolResponses = [];
|
||||
for (const call of functionCalls) {
|
||||
const toolName = call.name
|
||||
const toolToExecute = toolboxTools.find(t => t.getName() === toolName);
|
||||
|
||||
if (toolToExecute) {
|
||||
try {
|
||||
const functionResult = await toolToExecute(call.args);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { result: functionResult } }
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool '${toolName}':`, e);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { error: e.message } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentResult = await chat.sendMessage({ message: toolResponses });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
runApplication()
|
||||
.catch(console.error)
|
||||
.finally(() => console.log("\nApplication finished."));
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
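After the download, the binary still needs to be made executable before the server can be started in the later steps. A minimal sketch, assuming the tools file created later in this guide is named `tools.yaml` and sits next to the binary:

```bash
# Make the downloaded binary executable, then start the Toolbox server.
chmod +x toolbox
./toolbox --tools-file "tools.yaml"
```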
||||
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
module genai-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
google.golang.org/genai v1.24.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,174 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
|
||||
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
|
||||
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return &genai.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
// First, create the function declaration.
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
}
|
||||
|
||||
// Then, wrap the function declaration in a genai.Tool struct.
|
||||
return &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
|
||||
func printResponse(resp *genai.GenerateContentResponse) {
|
||||
for _, cand := range resp.Candidates {
|
||||
if cand.Content != nil {
|
||||
for _, part := range cand.Content.Parts {
|
||||
fmt.Println(part.Text)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
apiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
|
||||
// Initialize the Google GenAI client using the explicit ClientConfig.
|
||||
client, err := genai.NewClient(ctx, &genai.ClientConfig{
|
||||
APIKey: apiKey,
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google GenAI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
genAITools := make([]*genai.Tool, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
genAITools[i] = ConvertToGenaiTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Set up the generative model with the available tool.
|
||||
modelName := "gemini-2.0-flash"
|
||||
|
||||
// Create the initial content prompt for the model.
|
||||
messageHistory := []*genai.Content{
|
||||
genai.NewContentFromText(systemPrompt, genai.RoleUser),
|
||||
}
|
||||
config := &genai.GenerateContentConfig{
|
||||
Tools: genAITools,
|
||||
ToolConfig: &genai.ToolConfig{
|
||||
FunctionCallingConfig: &genai.FunctionCallingConfig{
|
||||
Mode: genai.FunctionCallingConfigModeAny,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
|
||||
|
||||
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed for query '%s': %v", query, err)
|
||||
}
|
||||
|
||||
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
|
||||
}
|
||||
|
||||
functionCalls := genContentResp.FunctionCalls()
|
||||
|
||||
toolResponseParts := []*genai.Part{}
|
||||
|
||||
for _, fc := range functionCalls {
|
||||
|
||||
toolToInvoke, found := toolsMap[fc.Name]
|
||||
if !found {
|
||||
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
|
||||
}
|
||||
|
||||
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
|
||||
if invokeErr != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
|
||||
}
|
||||
|
||||
// Enhanced Tool Result Handling (retained to prevent nil issues)
|
||||
toolResultString := ""
|
||||
if toolResult != nil {
|
||||
jsonBytes, marshalErr := json.Marshal(toolResult)
|
||||
if marshalErr == nil {
|
||||
toolResultString = string(jsonBytes)
|
||||
} else {
|
||||
toolResultString = fmt.Sprintf("%v", toolResult)
|
||||
}
|
||||
}
|
||||
|
||||
responseMap := map[string]any{"result": toolResultString}
|
||||
|
||||
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
|
||||
}
|
||||
// Add all accumulated tool responses for this turn to the message history.
|
||||
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
|
||||
messageHistory = append(messageHistory, toolResponseContent)
|
||||
|
||||
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
|
||||
if err != nil {
|
||||
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
|
||||
}
|
||||
|
||||
printResponse(finalResponse)
|
||||
// Add the final textual response from the LLM to the history
|
||||
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
module genkit-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/firebase/genkit/go v0.6.2
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
||||
github.com/buger/jsonparser v1.1.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/goccy/go-yaml v1.17.1 // indirect
|
||||
github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/invopop/jsonschema v0.13.0 // indirect
|
||||
github.com/mailru/easyjson v0.9.0 // indirect
|
||||
github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a // indirect
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genai v1.11.1 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
@@ -1,129 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/firebase/genkit/go/plugins/googlegenai"
|
||||
)
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create Toolbox Client
|
||||
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
// Initialize Genkit
|
||||
g, err := genkit.Init(ctx,
|
||||
genkit.WithPlugins(&googlegenai.GoogleAI{}),
|
||||
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to init genkit: %v\n", err)
|
||||
}
|
||||
|
||||
// Create a conversation history
|
||||
conversationHistory := []*ai.Message{
|
||||
ai.NewSystemTextMessage(systemPrompt),
|
||||
}
|
||||
|
||||
// Convert your tool to a Genkit tool.
|
||||
genkitTools := make([]ai.Tool, len(tools))
|
||||
for i, tool := range tools {
|
||||
newTool, err := tbgenkit.ToGenkitTool(tool, g)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to convert tool: %v\n", err)
|
||||
}
|
||||
genkitTools[i] = newTool
|
||||
}
|
||||
|
||||
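// Genkit's Generate call expects ai.ToolRef values, so collect a reference for each converted tool.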
toolRefs := make([]ai.ToolRef, len(genkitTools))
|
||||
|
||||
for i, tool := range genkitTools {
|
||||
toolRefs[i] = tool
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
|
||||
response, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(conversationHistory...),
|
||||
ai.WithTools(toolRefs...),
|
||||
ai.WithReturnToolRequests(true),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("%v\n", err)
|
||||
}
|
||||
conversationHistory = append(conversationHistory, response.Message)
|
||||
|
||||
parts := []*ai.Part{}
|
||||
|
||||
for _, req := range response.ToolRequests() {
|
||||
tool := genkit.LookupTool(g, req.Name)
|
||||
if tool == nil {
|
||||
log.Fatalf("tool %q not found", req.Name)
|
||||
}
|
||||
|
||||
output, err := tool.RunRaw(ctx, req.Input)
|
||||
if err != nil {
|
||||
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
|
||||
}
|
||||
|
||||
parts = append(parts,
|
||||
ai.NewToolResponsePart(&ai.ToolResponse{
|
||||
Name: req.Name,
|
||||
Ref: req.Ref,
|
||||
Output: output,
|
||||
}))
|
||||
|
||||
}
|
||||
|
||||
if len(parts) > 0 {
|
||||
resp, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
|
||||
ai.WithTools(toolRefs...),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println("\n", resp.Text())
|
||||
conversationHistory = append(conversationHistory, resp.Message)
|
||||
} else {
|
||||
fmt.Println("\n", response.Text())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
module langchan-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
github.com/tmc/langchaingo v0.1.13
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/ai v0.7.0 // indirect
|
||||
cloud.google.com/go/aiplatform v1.85.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
cloud.google.com/go/iam v1.5.2 // indirect
|
||||
cloud.google.com/go/longrunning v0.6.7 // indirect
|
||||
cloud.google.com/go/vertexai v0.12.0 // indirect
|
||||
github.com/dlclark/regexp2 v1.10.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/generative-ai-go v0.15.1 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/pkoukk/tiktoken-go v0.1.6 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,149 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
"github.com/tmc/langchaingo/llms/googleai"
|
||||
)
|
||||
|
||||
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
|
||||
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return llms.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Convert into LangChain's llms.Tool
|
||||
return llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
genaiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
// Initialize the Google AI client (LLM).
|
||||
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google AI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
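// Keep a name -> tool map so the model's tool calls can be dispatched to the right Toolbox tool below.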
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
langchainTools := make([]llms.Tool, len(tools))
|
||||
// Convert the loaded ToolboxTools into the format LangChainGo requires.
|
||||
for i, tool := range tools {
|
||||
langchainTools[i] = ConvertToLangchainTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Start the conversation history.
|
||||
messageHistory := []llms.MessageContent{
|
||||
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
|
||||
|
||||
// Make the first call to the LLM, making it aware of the tool.
|
||||
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed: %v", err)
|
||||
}
|
||||
respChoice := resp.Choices[0]
|
||||
|
||||
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
assistantResponse.Parts = append(assistantResponse.Parts, tc)
|
||||
}
|
||||
messageHistory = append(messageHistory, assistantResponse)
|
||||
|
||||
// Process each tool call requested by the model.
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
toolName := tc.FunctionCall.Name
|
||||
tool := toolsMap[toolName]
|
||||
var args map[string]any
|
||||
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
|
||||
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
|
||||
}
|
||||
toolResult, err := tool.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
|
||||
}
|
||||
if toolResult == "" || toolResult == nil {
|
||||
toolResult = "Operation completed successfully with no specific return value."
|
||||
}
|
||||
|
||||
// Create the tool call response message and add it to the history.
|
||||
toolResponse := llms.MessageContent{
|
||||
Role: llms.ChatMessageTypeTool,
|
||||
Parts: []llms.ContentPart{
|
||||
llms.ToolCallResponse{
|
||||
Name: toolName,
|
||||
Content: fmt.Sprintf("%v", toolResult),
|
||||
},
|
||||
},
|
||||
}
|
||||
messageHistory = append(messageHistory, toolResponse)
|
||||
}
|
||||
finalResp, err := llm.GenerateContent(ctx, messageHistory)
|
||||
if err != nil {
|
||||
log.Fatalf("Final LLM call failed after tool execution: %v", err)
|
||||
}
|
||||
|
||||
// Add the final textual response from the LLM to the history
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
|
||||
|
||||
fmt.Println(finalResp.Choices[0].Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
module openai-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
github.com/openai/openai-go v1.12.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/tidwall/gjson v1.14.4 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,141 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
|
||||
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
|
||||
// Get the input schema
|
||||
jsonSchemaBytes, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Unmarshal the JSON bytes into FunctionParameters
|
||||
var paramsSchema openai.FunctionParameters
|
||||
if err := json.Unmarshal(jsonSchemaBytes, ¶msSchema); err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Create and return the final tool parameter struct.
|
||||
return openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: openai.String(toolboxTool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
toolboxURL := "http://localhost:5000"
|
||||
openAIClient := openai.NewClient()
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
|
||||
openAITools[i] = ConvertToOpenAITool(tool)
|
||||
// Add tool to a map for lookup later
|
||||
toolsMap[tool.Name()] = tool
|
||||
|
||||
}
|
||||
|
||||
params := openai.ChatCompletionNewParams{
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{
|
||||
openai.SystemMessage(systemPrompt),
|
||||
},
|
||||
Tools: openAITools,
|
||||
Seed: openai.Int(0),
|
||||
Model: openai.ChatModelGPT4o,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
params.Messages = append(params.Messages, openai.UserMessage(query))
|
||||
|
||||
// Make initial chat completion request
|
||||
completion, err := openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
toolCalls := completion.Choices[0].Message.ToolCalls
|
||||
|
||||
// If the model made no tool calls, just note it; the follow-up request below still runs.
|
||||
if len(toolCalls) == 0 {
|
||||
log.Println("No function call")
|
||||
}
|
||||
|
||||
// If there was a function call, continue the conversation
|
||||
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
|
||||
for _, toolCall := range toolCalls {
|
||||
|
||||
toolName := toolCall.Function.Name
|
||||
toolToInvoke := toolsMap[toolName]
|
||||
|
||||
var args map[string]any
|
||||
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
result, err := toolToInvoke.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatal("Could not invoke tool", err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
|
||||
}
|
||||
|
||||
completion, err = openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.AssistantMessage(completion.Choices[0].Message.Content))
|
||||
|
||||
fmt.Println("\n", completion.Choices[0].Message.Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
import { GoogleGenAI } from "@google/genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
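// Map a Zod type name from the tool's parameter schema to the JSON Schema type string used in the Gemini function declarations below.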
function mapZodTypeToOpenAPIType(zodTypeName) {
|
||||
|
||||
console.log(zodTypeName)
|
||||
const typeMap = {
|
||||
'ZodString': 'string',
|
||||
'ZodNumber': 'number',
|
||||
'ZodBoolean': 'boolean',
|
||||
'ZodArray': 'array',
|
||||
'ZodObject': 'object',
|
||||
};
|
||||
return typeMap[zodTypeName] || 'string';
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
const toolboxClient = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
|
||||
const geminiTools = [{
|
||||
functionDeclarations: toolboxTools.map(tool => {
|
||||
|
||||
const schema = tool.getParamSchema();
|
||||
const properties = {};
|
||||
const required = [];
|
||||
|
||||
|
||||
for (const [key, param] of Object.entries(schema.shape)) {
|
||||
properties[key] = {
|
||||
type: mapZodTypeToOpenAPIType(param.constructor.name),
|
||||
description: param.description || '',
|
||||
};
|
||||
required.push(key)
|
||||
}
|
||||
|
||||
return {
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
parameters: { type: 'object', properties, required },
|
||||
};
|
||||
})
|
||||
}];
|
||||
|
||||
|
||||
const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });
|
||||
|
||||
const chat = genAI.chats.create({
|
||||
model: "gemini-2.5-flash",
|
||||
config: {
|
||||
systemInstruction: prompt,
|
||||
tools: geminiTools,
|
||||
}
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
|
||||
let currentResult = await chat.sendMessage({ message: query });
|
||||
|
||||
let finalResponseGiven = false
|
||||
while (!finalResponseGiven) {
|
||||
|
||||
const response = currentResult;
|
||||
const functionCalls = response.functionCalls || [];
|
||||
|
||||
if (functionCalls.length === 0) {
|
||||
console.log(response.text)
|
||||
finalResponseGiven = true;
|
||||
} else {
|
||||
const toolResponses = [];
|
||||
for (const call of functionCalls) {
|
||||
const toolName = call.name
|
||||
const toolToExecute = toolboxTools.find(t => t.getName() === toolName);
|
||||
|
||||
if (toolToExecute) {
|
||||
try {
|
||||
const functionResult = await toolToExecute(call.args);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { result: functionResult } }
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool '${toolName}':`, e);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { error: e.message } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentResult = await chat.sendMessage({ message: toolResponses });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,88 +0,0 @@
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { genkit } from "genkit";
|
||||
import { googleAI } from '@genkit-ai/googleai';
|
||||
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
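// Register each Toolbox tool with Genkit and keep a name -> tool map so tool requests can be invoked later.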
const toolMap = Object.fromEntries(
|
||||
toolboxTools.map((tool) => {
|
||||
const definedTool = ai.defineTool(
|
||||
{
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
inputSchema: tool.getParamSchema(),
|
||||
},
|
||||
tool
|
||||
);
|
||||
return [tool.getName(), definedTool];
|
||||
})
|
||||
);
|
||||
const tools = Object.values(toolMap);
|
||||
|
||||
let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];
|
||||
|
||||
for (const query of queries) {
|
||||
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
||||
let response = await ai.generate({
|
||||
messages: conversationHistory,
|
||||
tools: tools,
|
||||
});
|
||||
conversationHistory.push(response.message);
|
||||
|
||||
const toolRequests = response.toolRequests;
|
||||
if (toolRequests?.length > 0) {
|
||||
// Execute tools concurrently and collect their responses.
|
||||
const toolResponses = await Promise.all(
|
||||
toolRequests.map(async (call) => {
|
||||
try {
|
||||
const toolOutput = await toolMap[call.name].invoke(call.input);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool ${call.name}:`, e);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
conversationHistory.push(...toolResponses);
|
||||
|
||||
// Call the AI again with the tool results.
|
||||
response = await ai.generate({ messages: conversationHistory, tools });
|
||||
conversationHistory.push(response.message);
|
||||
}
|
||||
|
||||
console.log(response.text);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,73 +0,0 @@
|
||||
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { tool } from "@langchain/core/tools";
|
||||
import { createReactAgent } from "@langchain/langgraph/prebuilt";
|
||||
import { MemorySaver } from "@langchain/langgraph";
|
||||
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
const model = new ChatGoogleGenerativeAI({
|
||||
model: "gemini-2.0-flash",
|
||||
});
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(toolboxTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
const tools = toolboxTools.map(getTool);
|
||||
|
||||
const agent = createReactAgent({
|
||||
llm: model,
|
||||
tools: tools,
|
||||
checkpointer: new MemorySaver(),
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
const langGraphConfig = {
|
||||
configurable: {
|
||||
thread_id: "test-thread",
|
||||
},
|
||||
};
|
||||
|
||||
for (const query of queries) {
|
||||
const agentOutput = await agent.invoke(
|
||||
{
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: query,
|
||||
},
|
||||
],
|
||||
verbose: true,
|
||||
},
|
||||
langGraphConfig
|
||||
);
|
||||
const response = agentOutput.messages[agentOutput.messages.length - 1].content;
|
||||
console.log(response);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,79 +0,0 @@
|
||||
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
|
||||
import { agent } from "@llamaindex/workflow";
|
||||
import { createMemory, staticBlock, tool } from "llamaindex";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
|
||||
When the user searches for a hotel, mention its name, id, location and price tier.
|
||||
Always mention hotel ids while performing any searches — this is very important for operations.
|
||||
For any bookings or cancellations, please provide the appropriate confirmation.
|
||||
Update check-in or check-out dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
// Connect to MCP Toolbox
|
||||
const client = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
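// Wrap each Toolbox tool as a LlamaIndex tool; the Toolbox tool itself serves as the execute function.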
const tools = toolboxTools.map((toolboxTool) => {
|
||||
return tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
execute: toolboxTool,
|
||||
});
|
||||
});
|
||||
|
||||
// Initialize LLM
|
||||
const llm = gemini({
|
||||
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
|
||||
apiKey: GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
const memory = createMemory({
|
||||
memoryBlocks: [
|
||||
staticBlock({
|
||||
content: prompt,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
// Create the Agent
|
||||
const myAgent = agent({
|
||||
tools: tools,
|
||||
llm,
|
||||
memory,
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
const result = await myAgent.run(query);
|
||||
const output = result.data.result;
|
||||
|
||||
console.log(`\nUser: ${query}`);
|
||||
if (typeof output === "string") {
|
||||
console.log(output.trim());
|
||||
} else if (typeof output === "object" && "text" in output) {
|
||||
console.log(output.text.trim());
|
||||
} else {
|
||||
console.log(JSON.stringify(output));
|
||||
}
|
||||
}
|
||||
// You may observe some extra logs during execution due to the run method provided by LlamaIndex.
|
||||
console.log("Agent run finished.");
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,70 +0,0 @@
|
||||
from google.adk.agents import Agent
|
||||
from google.adk.runners import Runner
|
||||
from google.adk.sessions import InMemorySessionService
|
||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||
from google.genai import types
|
||||
from toolbox_core import ToolboxSyncClient
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
# TODO(developer): replace this with your Google API key
|
||||
|
||||
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
|
||||
|
||||
async def main():
|
||||
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
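# The loaded Toolbox toolset is passed directly to the ADK agent as its tools.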
root_agent = Agent(
|
||||
model='gemini-2.0-flash-001',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = await session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
|
||||
for text in responses:
|
||||
print(text)
|
||||
|
||||
asyncio.run(main())
|
||||
@@ -1,108 +0,0 @@
|
||||
import asyncio
|
||||
|
||||
from google import genai
|
||||
from google.genai.types import (
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
Part,
|
||||
Tool,
|
||||
)
|
||||
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Please book the hotel Hilton Basel for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Hyatt Regency for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def main():
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:
|
||||
|
||||
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
|
||||
# integration. While this example uses Google's genai client, these callables can be adapted for
|
||||
# various function-calling or agent frameworks. For easier integration with supported frameworks
|
||||
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
|
||||
# provided wrapper packages, which handle framework-specific boilerplate.
|
||||
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
|
||||
genai_client = genai.Client(
|
||||
vertexai=True, project="project-id", location="us-central1"
|
||||
)
|
||||
|
||||
genai_tools = [
|
||||
Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration.from_callable_with_api_option(callable=tool)
|
||||
]
|
||||
)
|
||||
for tool in toolbox_tools
|
||||
]
|
||||
history = []
|
||||
for query in queries:
|
||||
user_prompt_content = Content(
|
||||
role="user",
|
||||
parts=[Part.from_text(text=query)],
|
||||
)
|
||||
history.append(user_prompt_content)
|
||||
|
||||
response = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
system_instruction=prompt,
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
history.append(response.candidates[0].content)
|
||||
function_response_parts = []
|
||||
for function_call in response.function_calls or []:
|
||||
fn_name = function_call.name
|
||||
# The toolset's tools are sorted alphabetically by name, so the indices below are:
# book-hotel (0), cancel-hotel (1), search-hotels-by-location (2),
# search-hotels-by-name (3), update-hotel (4)
|
||||
if fn_name == "search-hotels-by-name":
|
||||
function_result = await toolbox_tools[3](**function_call.args)
|
||||
elif fn_name == "search-hotels-by-location":
|
||||
function_result = await toolbox_tools[2](**function_call.args)
|
||||
elif fn_name == "book-hotel":
|
||||
function_result = await toolbox_tools[0](**function_call.args)
|
||||
elif fn_name == "update-hotel":
|
||||
function_result = await toolbox_tools[4](**function_call.args)
|
||||
elif fn_name == "cancel-hotel":
|
||||
function_result = await toolbox_tools[1](**function_call.args)
|
||||
else:
|
||||
raise ValueError("Function name not present.")
|
||||
function_response = {"result": function_result}
|
||||
function_response_part = Part.from_function_response(
|
||||
name=function_call.name,
|
||||
response=function_response,
|
||||
)
|
||||
function_response_parts.append(function_response_part)
|
||||
|
||||
if function_response_parts:
|
||||
tool_response_content = Content(role="tool", parts=function_response_parts)
|
||||
history.append(tool_response_content)
|
||||
|
||||
response2 = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
final_model_response_content = response2.candidates[0].content
|
||||
history.append(final_model_response_content)
|
||||
print(response2.text)
|
||||
|
||||
asyncio.run(main())
|
||||
@@ -1,52 +0,0 @@
|
||||
import asyncio
|
||||
|
||||
from langgraph.prebuilt import create_react_agent
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from langchain_google_vertexai import ChatVertexAI
|
||||
|
||||
# from langchain_google_genai import ChatGoogleGenerativeAI
|
||||
|
||||
# from langchain_anthropic import ChatAnthropic
|
||||
|
||||
from langgraph.checkpoint.memory import MemorySaver
|
||||
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def main():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
|
||||
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
|
||||
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
|
||||
|
||||
config = {"configurable": {"thread_id": "thread-1"}}
|
||||
for query in queries:
|
||||
inputs = {"messages": [("user", prompt + query)]}
|
||||
response = agent.invoke(inputs, stream_mode="values", config=config)
|
||||
print(response["messages"][-1].content)
|
||||
|
||||
asyncio.run(main())
|
||||
@@ -1,63 +0,0 @@
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
from llama_index.core.agent.workflow import AgentWorkflow
|
||||
|
||||
from llama_index.core.workflow import Context
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from llama_index.llms.google_genai import GoogleGenAI
|
||||
|
||||
# from llama_index.llms.anthropic import Anthropic
|
||||
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def main():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
llm = GoogleGenAI(
|
||||
model="gemini-2.0-flash-001",
|
||||
vertexai_config={"project": "project-id", "location": "us-central1"},
|
||||
)
|
||||
# llm = GoogleGenAI(
|
||||
# api_key=os.getenv("GOOGLE_API_KEY"),
|
||||
# model="gemini-2.0-flash-001",
|
||||
# )
|
||||
# llm = Anthropic(
|
||||
# model="claude-3-7-sonnet-latest",
|
||||
# api_key=os.getenv("ANTHROPIC_API_KEY")
|
||||
# )
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = AgentWorkflow.from_tools_or_functions(
|
||||
tools,
|
||||
llm=llm,
|
||||
system_prompt=prompt,
|
||||
)
|
||||
ctx = Context(agent)
|
||||
for query in queries:
|
||||
response = await agent.run(user_msg=query, ctx=ctx)
|
||||
print(f"---- {query} ----")
|
||||
print(str(response))
|
||||
|
||||
asyncio.run(main())
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -6,9 +6,328 @@ description: >
|
||||
Connect your IDE to Firestore using Toolbox.
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
|
||||
an open protocol for connecting Large Language Models (LLMs) to data sources
|
||||
like Firestore. This guide covers how to use [MCP Toolbox for Databases][toolbox]
|
||||
to expose your developer assistant tools to a Firestore instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up Firestore
|
||||
|
||||
1. Create or select a Google Cloud project.
|
||||
|
||||
* [Create a new
|
||||
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects)
|
||||
* [Select an existing
|
||||
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects)
|
||||
|
||||
1. [Enable the Firestore
|
||||
API](https://console.cloud.google.com/apis/library/firestore.googleapis.com)
|
||||
for your project.
|
||||
|
||||
1. [Create a Firestore
|
||||
database](https://cloud.google.com/firestore/docs/create-database-web-mobile-client-library)
|
||||
if you haven't already.
|
||||
|
||||
1. Set up authentication for your local environment.
|
||||
|
||||
* [Install gcloud CLI](https://cloud.google.com/sdk/docs/install)
|
||||
    * Run `gcloud auth application-default login` to authenticate (a short gcloud sketch follows this list)
|
||||
|
||||
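For reference, the same setup can be sketched with the gcloud CLI. This is only an illustrative sequence; `your-project-id` is a placeholder, and creating the database itself is covered by the client-library guide linked above.

```bash
# Illustrative only: point gcloud at your project (placeholder ID).
gcloud config set project your-project-id

# Enable the Firestore API for the project.
gcloud services enable firestore.googleapis.com

# Authenticate your local environment with Application Default Credentials.
gcloud auth application-default login
```
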
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
V0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini
|
||||
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code
|
||||
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
|
||||
extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Firestore using MCP. Try asking your AI
|
||||
assistant to list collections, get documents, query collections, or manage
|
||||
security rules.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **firestore-get-documents**: Gets multiple documents from Firestore by their
|
||||
paths
|
||||
1. **firestore-list-collections**: List Firestore collections for a given parent
|
||||
path
|
||||
1. **firestore-delete-documents**: Delete multiple documents from Firestore
|
||||
1. **firestore-query-collection**: Query documents from a collection with
|
||||
filtering, ordering, and limit options
|
||||
1. **firestore-get-rules**: Retrieves the active Firestore security rules for
|
||||
the current project
|
||||
1. **firestore-validate-rules**: Validates Firestore security rules syntax and
|
||||
reports errors
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
|
||||
@@ -48,19 +48,19 @@ to expose your developer assistant tools to a Looker instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -235,7 +235,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
|
||||
@@ -37,19 +37,19 @@ description: "Connect your IDE to SQL Server using Toolbox."
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -182,17 +182,19 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mssql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"cloud-sql-sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","cloud-sql-mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -284,4 +286,4 @@ The following tools are available to the LLM:
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
{{< /notice >}}
|
||||
@@ -37,19 +37,19 @@ description: "Connect your IDE to MySQL using Toolbox."
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -182,7 +182,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
|
||||
@@ -1,281 +0,0 @@
|
||||
---
|
||||
title: Neo4j using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to Neo4j using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Neo4j instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a Neo4j instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.15.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant to get the graph schema or execute Cypher statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **get_schema**: extracts the complete database schema, including details about node labels, relationships, properties, constraints, and indexes.
|
||||
1. **execute_cypher**: executes any arbitrary Cypher statement.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -217,7 +217,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
|
||||
@@ -1,255 +0,0 @@
|
||||
---
|
||||
title: SQLite using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to SQLite using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQLite. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQLite instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a SQLite database file.](https://www.sqlite.org/download.html)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","sqlite","--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","sqlite","--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","sqlite","--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","sqlite","--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to SQLite using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -19,9 +19,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
|
||||
* `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
|
||||
* `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
|
||||
* `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
|
||||
* `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
|
||||
* `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
|
||||
* `ALLOYDB_POSTGRES_USER`: The database username.
|
||||
* `ALLOYDB_POSTGRES_PASSWORD`: The password for the database user.
|
||||
* **Permissions:**
|
||||
* **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
@@ -50,14 +49,12 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `--prebuilt` value: `bigquery`
|
||||
* **Environment Variables:**
|
||||
* `BIGQUERY_PROJECT`: The GCP project ID.
|
||||
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
|
||||
* **Permissions:**
|
||||
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view metadata.
|
||||
* **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view all datasets.
|
||||
* **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or modify datasets and tables.
|
||||
* **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the conversational analytics API.
|
||||
* **Tools:**
|
||||
* `analyze_contribution`: Use this tool to perform contribution analysis, also called key driver analysis.
|
||||
* `ask_data_insights`: Use this tool to perform data analysis, get insights, or answer complex questions about the contents of specific BigQuery tables. For more information on required roles, API setup, and IAM configuration, see the setup and authentication section of the [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
|
||||
* `execute_sql`: Executes a SQL statement.
|
||||
* `forecast`: Use this tool to forecast time series data.
|
||||
@@ -76,8 +73,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `CLOUD_SQL_MYSQL_DATABASE`: The name of the database to connect to.
|
||||
* `CLOUD_SQL_MYSQL_USER`: The database username.
|
||||
* `CLOUD_SQL_MYSQL_PASSWORD`: The password for the database user.
|
||||
* `CLOUD_SQL_MYSQL_IP_TYPE`: The IP type, i.e. "Public" or "Private" (Default: Public).
|
||||
* **Permissions:**
|
||||
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
@@ -93,9 +88,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
|
||||
* `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
|
||||
* `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
|
||||
* `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
|
||||
* `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
|
||||
* `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
|
||||
* `CLOUD_SQL_POSTGRES_USER`: The database username.
|
||||
* `CLOUD_SQL_POSTGRES_PASSWORD`: The password for the database user.
|
||||
* **Permissions:**
|
||||
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
@@ -114,7 +108,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
|
||||
* `CLOUD_SQL_MSSQL_USER`: The database username.
|
||||
* `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
|
||||
* `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
|
||||
* **Permissions:**
|
||||
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
@@ -140,7 +133,7 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `--prebuilt` value: `firestore`
|
||||
* **Environment Variables:**
|
||||
* `FIRESTORE_PROJECT`: The GCP project ID.
|
||||
* `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to "(default)".
|
||||
* `FIRESTORE_DATABASE`: The Firestore database ID.
|
||||
* **Permissions:**
|
||||
* **Cloud Datastore User** (`roles/datastore.user`) to get documents, list collections, and query collections.
|
||||
* **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and validate Firestore rules.
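
A minimal MCP client entry for this prebuilt configuration might look like the sketch below; the server name and file location depend on your client, and the empty values are placeholders to fill in:

```json
{
  "mcpServers": {
    "firestore": {
      "command": "./PATH/TO/toolbox",
      "args": ["--prebuilt", "firestore", "--stdio"],
      "env": {
        "FIRESTORE_PROJECT": "",
        "FIRESTORE_DATABASE": ""
      }
    }
  }
}
```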
|
||||
@@ -233,7 +226,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `POSTGRES_DATABASE`: The name of the database to connect to.
|
||||
* `POSTGRES_USER`: The database username.
|
||||
* `POSTGRES_PASSWORD`: The password for the database user.
|
||||
* `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db connection string.
|
||||
* **Permissions:**
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
* **Tools:**
|
||||
@@ -269,28 +261,3 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
|
||||
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
|
||||
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
|
||||
## SQLite
|
||||
|
||||
* `--prebuilt` value: `sqlite`
|
||||
* **Environment Variables:**
|
||||
* `SQLITE_DATABASE`: The path to the SQLite database file (e.g., `./sample.db`).
|
||||
* **Permissions:**
|
||||
* File system read/write permissions for the specified database file.
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
|
||||
## Neo4j
|
||||
|
||||
* `--prebuilt` value: `neo4j`
|
||||
* **Environment Variables:**
|
||||
* `NEO4J_URI`: The URI of the Neo4j instance (e.g., `bolt://localhost:7687`).
|
||||
* `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
|
||||
* `NEO4J_USERNAME`: The username for the Neo4j instance.
|
||||
* `NEO4J_PASSWORD`: The password for the Neo4j instance.
|
||||
* **Permissions:**
|
||||
* **Database-level permissions** are required to execute Cypher queries.
|
||||
* **Tools:**
|
||||
* `execute_cypher`: Executes a Cypher query.
|
||||
* `get_schema`: Retrieves the schema of the Neo4j database.
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
title: "AlloyDB Admin"
|
||||
linkTitle: "AlloyDB Admin"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
The "alloydb-admin" source provides a client for the AlloyDB API.
|
||||
aliases:
|
||||
- /resources/sources/alloydb-admin
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `alloydb-admin` source provides a client to interact with the [Google AlloyDB API](https://cloud.google.com/alloydb/docs/reference/rest). This allows tools to perform administrative tasks on AlloyDB resources, such as managing clusters, instances, and users.
|
||||
|
||||
Authentication can be handled in two ways:
|
||||
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
|
||||
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-alloydb-admin:
|
||||
kind: alloydb-admin
|
||||
|
||||
my-oauth-alloydb-admin:
|
||||
kind: alloydb-admin
|
||||
useClientOAuth: true
|
||||
```
|
||||
|
||||
## Reference
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "alloydb-admin". |
|
||||
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
|
||||
@@ -33,9 +33,6 @@ cluster][alloydb-free-trial].
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in AlloyDB Postgres.
|
||||
|
||||
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
|
||||
List tables in an AlloyDB for PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
|
||||
|
||||
@@ -36,18 +36,12 @@ avoiding full table scans or complex filters.
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`bigquery-analyze-contribution`](../tools/bigquery/bigquery-analyze-contribution.md)
|
||||
Performs contribution analysis, also called key driver analysis in BigQuery.
|
||||
|
||||
- [`bigquery-conversational-analytics`](../tools/bigquery/bigquery-conversational-analytics.md)
|
||||
Allows conversational interaction with a BigQuery source.
|
||||
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
|
||||
Run SQL queries directly against BigQuery datasets.
|
||||
|
||||
- [`bigquery-execute-sql`](../tools/bigquery/bigquery-execute-sql.md)
|
||||
Execute structured queries using parameters.
|
||||
|
||||
- [`bigquery-forecast`](../tools/bigquery/bigquery-forecast.md)
|
||||
Forecasts time series data in BigQuery.
|
||||
|
||||
- [`bigquery-get-dataset-info`](../tools/bigquery/bigquery-get-dataset-info.md)
|
||||
Retrieve metadata for a specific dataset.
|
||||
|
||||
@@ -60,9 +54,6 @@ avoiding full table scans or complex filters.
|
||||
- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)
|
||||
List tables in a given dataset.
|
||||
|
||||
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
|
||||
Run SQL queries directly against BigQuery datasets.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
|
||||
@@ -94,11 +85,15 @@ allows Toolbox to make queries to [BigQuery][bigquery-docs] on behalf of the
|
||||
client or the end-user.
|
||||
|
||||
When using this on-behalf-of authentication, you must ensure that the
|
||||
identity used has been granted the correct IAM permissions.
|
||||
identity used has been granted the correct IAM permissions. Currently,
|
||||
this option is only supported by the following BigQuery tools:
|
||||
|
||||
[iam-overview]: <https://cloud.google.com/bigquery/docs/access-control>
|
||||
[adc]: <https://cloud.google.com/docs/authentication#adc>
|
||||
[set-adc]: <https://cloud.google.com/docs/authentication/provide-credentials-adc>
|
||||
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
|
||||
Run SQL queries directly against BigQuery datasets.
|
||||
|
||||
[iam-overview]: https://cloud.google.com/bigquery/docs/access-control
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
|
||||
## Example
|
||||
|
||||
@@ -109,10 +104,6 @@ sources:
|
||||
my-bigquery-source:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
```
|
||||
|
||||
Initialize a BigQuery source that uses the client's access token:
|
||||
@@ -123,18 +114,13 @@ sources:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
useClientOAuth: true
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery". |
|
||||
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
|
||||
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
|
||||
| allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
|
||||
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery". |
|
||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. The default behavior is for it to be executed in the US multi-region |
|
||||
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |
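
As a sketch of how the optional fields above combine, a source restricted to specific datasets might look like the following; the dataset names are placeholders:

```yaml
sources:
  my-restricted-bigquery-source:
    kind: "bigquery"
    project: "my-project-id"
    location: "US"
    allowedDatasets:
      - "my_dataset_1"
      - "other_project.my_dataset_2"
```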
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
title: "Cloud Monitoring"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "cloud-monitoring" source provides a client for the Cloud Monitoring API.
|
||||
aliases:
|
||||
- /resources/sources/cloud-monitoring
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cloud-monitoring` source provides a client to interact with the [Google Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows tools to access the Cloud Monitoring Metrics Explorer and run PromQL queries.
|
||||
|
||||
Authentication can be handled in two ways:
|
||||
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
|
||||
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-cloud-monitoring:
|
||||
kind: cloud-monitoring
|
||||
|
||||
my-oauth-cloud-monitoring:
|
||||
kind: cloud-monitoring
|
||||
useClientOAuth: true
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cloud-monitoring". |
|
||||
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
title: "Cloud SQL Admin"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "cloud-sql-admin" source provides a client for the Cloud SQL Admin API.
|
||||
aliases:
|
||||
- /resources/sources/cloud-sql-admin
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cloud-sql-admin` source provides a client to interact with the [Google Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This allows tools to perform administrative tasks on Cloud SQL instances, such as creating users and databases.
|
||||
|
||||
Authentication can be handled in two ways:
|
||||
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
|
||||
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-cloud-sql-admin:
|
||||
kind: cloud-sql-admin
|
||||
|
||||
my-oauth-cloud-sql-admin:
|
||||
kind: cloud-sql-admin
|
||||
useClientOAuth: true
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "cloud-sql-admin". |
|
||||
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
|
||||
@@ -28,9 +28,6 @@ to a database by following these instructions][csql-mysql-quickstart].
|
||||
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
|
||||
Run parameterized SQL queries in Cloud SQL for MySQL.
|
||||
|
||||
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
|
||||
List tables in a Cloud SQL for MySQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
|
||||
|
||||
@@ -28,9 +28,6 @@ to a database by following these instructions][csql-pg-quickstart].
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in PostgreSQL.
|
||||
|
||||
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
|
||||
List tables in a PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
|
||||
|
||||
@@ -47,8 +47,7 @@ a self-signed ssl certificate for the Looker server. Anything other than "true"
|
||||
will be interpreted as false.
|
||||
|
||||
The client id and client secret are seemingly random character sequences
|
||||
assigned by the looker server. If you are using Looker OAuth you don't need
|
||||
these settings
|
||||
assigned by the looker server.
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
@@ -61,11 +60,10 @@ instead of hardcoding your secrets into the configuration file.
|
||||
| -------------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "looker". |
|
||||
| base_url             | string   | true         | The URL of your Looker server, with no trailing `/`.                                        |
|
||||
| client_id | string | false | The client id assigned by Looker. |
|
||||
| client_secret | string | false | The client secret assigned by Looker. |
|
||||
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
|
||||
| client_id | string | true | The client id assigned by Looker. |
|
||||
| client_secret | string | true | The client secret assigned by Looker. |
|
||||
| verify_ssl | string | true | Whether to check the ssl certificate of the server. |
|
||||
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
|
||||
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) |
|
||||
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
|
||||
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
|
||||
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
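
Following the tip above about `${ENV_NAME}` replacement, a Looker source entry might look like the sketch below; the URL and environment variable names are illustrative only:

```yaml
sources:
  my-looker-source:
    kind: looker
    base_url: https://looker.example.com
    client_id: ${LOOKER_CLIENT_ID}
    client_secret: ${LOOKER_CLIENT_SECRET}
    verify_ssl: "true"
```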
|
||||
|
||||
@@ -22,9 +22,6 @@ reliability, performance, and ease of use.
|
||||
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
|
||||
Run parameterized SQL queries in MySQL.
|
||||
|
||||
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
|
||||
List tables in a MySQL database.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
@@ -45,9 +42,6 @@ sources:
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
# Optional TLS and other driver parameters. For example, enable preferred TLS:
|
||||
# queryParams:
|
||||
# tls: preferred
|
||||
queryTimeout: 30s # Optional: query timeout duration
|
||||
```
|
||||
|
||||
@@ -67,4 +61,3 @@ instead of hardcoding your secrets into the configuration file.
|
||||
| user | string | true | Name of the MySQL user to connect as (e.g. "my-mysql-user"). |
|
||||
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
|
||||
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
|
||||
| queryParams | map<string,string> | false | Arbitrary DSN parameters passed to the driver (e.g. `tls: preferred`, `charset: utf8mb4`). Useful for enabling TLS or other connection options. |
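
A sketch of a source entry using these optional fields, for example to request TLS through `queryParams`; the host, port, and kind values are assumed here to match this page's source type:

```yaml
sources:
  my-mysql-source:
    kind: mysql
    host: 127.0.0.1
    port: 3306
    database: my_db
    user: ${USER_NAME}
    password: ${PASSWORD}
    queryTimeout: 30s
    queryParams:
      tls: preferred
```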
|
||||
|
||||
@@ -23,9 +23,6 @@ reputation for reliability, feature robustness, and performance.
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in PostgreSQL.
|
||||
|
||||
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
|
||||
List tables in a PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
|
||||
|
||||
@@ -26,14 +26,6 @@ SQLite has the following notable characteristics:
|
||||
|
||||
- [`sqlite-sql`](../tools/sqlite/sqlite-sql.md)
|
||||
Run SQL queries against a local SQLite database.
|
||||
|
||||
- [`sqlite-execute-sql`](../tools/sqlite/sqlite-execute-sql.md)
|
||||
Run parameterized SQL statements in SQLite.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [SQLite using MCP](../../how-to/connect-ide/sqlite_mcp.md)
|
||||
Connect your IDE to SQLite using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
---
|
||||
title: "YugabyteDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
YugabyteDB is a high-performance, distributed SQL database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database designed for global, internet-scale applications, with full PostgreSQL compatibility.
|
||||
|
||||
[yugabytedb]: https://www.yugabyte.com/
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-yb-source:
|
||||
kind: yugabytedb
|
||||
host: 127.0.0.1
|
||||
port: 5433
|
||||
database: yugabyte
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
loadBalance: true
|
||||
topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------------------------------|:--------:|:------------:|------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "yugabytedb". |
|
||||
| host | string | true | IP address to connect to. |
|
||||
| port | integer | true | Port to connect to. The default port is 5433. |
|
||||
| database | string | true | Name of the YugabyteDB database to connect to. The default database name is yugabyte. |
|
||||
| user | string | true | Name of the YugabyteDB user to connect as. The default user is yugabyte. |
|
||||
| password | string | true | Password of the YugabyteDB user. The default password is yugabyte. |
|
||||
| loadBalance | boolean | false | If true, enable uniform load balancing. The default loadBalance value is false. |
|
||||
| topologyKeys | string | false | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
|
||||
| ybServersRefreshInterval | integer | false | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300. |
|
||||
| fallbackToTopologyKeysOnly | boolean | false | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By default, this is set to false. |
|
||||
| failedHostReconnectDelaySecs | integer | false | Time (in seconds) to wait before trying to connect to failed nodes. The default value is 5. |
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
title: "AlloyDB for PostgreSQL"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools for AlloyDB for PostgreSQL.
|
||||
---
|
||||
@@ -1,37 +0,0 @@
|
||||
---
|
||||
title: "alloydb-get-cluster"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "alloydb-get-cluster" tool retrieves details for a specific AlloyDB cluster.
|
||||
aliases:
|
||||
- /resources/tools/alloydb-get-cluster
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `alloydb-get-cluster` tool retrieves detailed information for a single, specified AlloyDB cluster. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
|
||||
|
||||
| Parameter | Type | Description | Required |
|
||||
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
|
||||
| `projectId` | string | The GCP project ID to get cluster for. | Yes |
|
||||
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). | Yes |
|
||||
| `clusterId` | string | The ID of the cluster to retrieve. | Yes |
|
||||
> **Note**
|
||||
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_specific_cluster:
|
||||
kind: alloydb-get-cluster
|
||||
source: my-alloydb-admin-source
|
||||
description: Use this tool to retrieve details for a specific AlloyDB cluster.
|
||||
```
|
||||
## Reference
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be alloydb-get-cluster. |
|
||||
| source | string | true | The name of an `alloydb-admin` source. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,38 +0,0 @@
|
||||
---
|
||||
title: "alloydb-list-clusters"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "alloydb-list-clusters" tool lists the AlloyDB clusters in a given project and location.
|
||||
aliases:
|
||||
- /resources/tools/alloydb-list-clusters
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `alloydb-list-clusters` tool retrieves AlloyDB cluster information for all or specified locations in a given project. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.

The `alloydb-list-clusters` tool lists detailed information about AlloyDB clusters (cluster name, state, configuration, etc.) for a given project and location. The tool takes the following input parameters:
|
||||
|
||||
| Parameter | Type | Description | Required |
|
||||
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
|
||||
| `projectId` | string | The GCP project ID to list clusters for. | Yes |
|
||||
| `locationId` | string | The location to list clusters in (e.g., 'us-central1'). Use `-` for all locations. Default: `-`.| No |
|
||||
> **Note**
|
||||
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_clusters:
|
||||
kind: alloydb-list-clusters
|
||||
source: alloydb-admin-source
|
||||
description: Use this tool to list all AlloyDB clusters in a given project and location.
|
||||
```
|
||||
## Reference
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be alloydb-list-clusters. |
|
||||
| source | string | true | The name of an `alloydb-admin` source. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,39 +0,0 @@
|
||||
---
|
||||
title: "alloydb-list-instances"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "alloydb-list-instances" tool lists the AlloyDB instances for a given project, cluster and location.
|
||||
aliases:
|
||||
- /resources/tools/alloydb-list-instances
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `alloydb-list-instances` tool retrieves AlloyDB instance information for all or specified clusters and locations in a given project. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.

The `alloydb-list-instances` tool lists detailed information about AlloyDB instances (instance name, type, IP address, state, configuration, etc.) for a given project, cluster, and location. The tool takes the following input parameters:
|
||||
|
||||
| Parameter | Type | Description | Required |
|
||||
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
|
||||
| `projectId` | string | The GCP project ID to list instances for. | Yes |
|
||||
| `clusterId` | string | The ID of the cluster to list instances from. Use '-' to get results for all clusters. Default: `-`.| No |
|
||||
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). Use '-' to get results for all locations. Default: `-`.| No |
|
||||
> **Note**
|
||||
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_instances:
|
||||
kind: alloydb-list-instances
|
||||
source: alloydb-admin-source
|
||||
description: Use this tool to list all AlloyDB instances for a given project, cluster and location.
|
||||
```
|
||||
## Reference
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be alloydb-list-instances. | |
|
||||
| source | string | true | The name of an `alloydb-admin` source. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,38 +0,0 @@
|
||||
---
|
||||
title: "alloydb-list-users"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "alloydb-list-users" tool lists all database users within an AlloyDB cluster.
|
||||
aliases:
|
||||
- /resources/tools/alloydb-list-users
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `alloydb-list-users` tool lists all database users within an AlloyDB cluster. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
|
||||
The tool takes the following input parameters:
|
||||
|
||||
| Parameter | Type | Description | Required |
|
||||
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
|
||||
| `projectId` | string | The GCP project ID to list users for. | Yes |
|
||||
| `clusterId` | string | The ID of the cluster to list users from. | Yes |
|
||||
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). | Yes |
|
||||
> **Note**
|
||||
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_users:
|
||||
kind: alloydb-list-users
|
||||
source: alloydb-admin-source
|
||||
description: Use this tool to list all database users within an AlloyDB cluster
|
||||
```
|
||||
## Reference
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be alloydb-list-users. | |
|
||||
| source | string | true | The name of an `alloydb-admin` source. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,52 +0,0 @@
|
||||
---
|
||||
title: "bigquery-analyze-contribution"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigquery-analyze-contribution" tool performs contribution analysis in BigQuery.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-analyze-contribution
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `bigquery-analyze-contribution` tool performs contribution analysis in BigQuery by creating a temporary `CONTRIBUTION_ANALYSIS` model and then querying it with `ML.GET_INSIGHTS` to find top contributors for a given metric.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-analyze-contribution` takes the following parameters:
|
||||
|
||||
- **input_data** (string, required): The data that contain the test and control data to analyze. This can be a fully qualified BigQuery table ID (e.g., `my-project.my_dataset.my_table`) or a SQL query that returns the data.
|
||||
- **contribution_metric** (string, required): The name of the column that contains the metric to analyze. This can be SUM(metric_column_name), SUM(numerator_metric_column_name)/SUM(denominator_metric_column_name) or SUM(metric_sum_column_name)/COUNT(DISTINCT categorical_column_name) depending the type of metric to analyze.
|
||||
- **is_test_col** (string, required): The name of the column that identifies whether a row is in the test or control group. The column must contain boolean values.
|
||||
- **dimension_id_cols** (array of strings, optional): An array of column names that uniquely identify each dimension.
|
||||
- **top_k_insights_by_apriori_support** (integer, optional): The number of top insights to return, ranked by apriori support. Default to '30'.
|
||||
- **pruning_method** (string, optional): The method to use for pruning redundant insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to `'PRUNE_REDUNDANT_INSIGHTS'`.
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
contribution_analyzer:
|
||||
kind: bigquery-analyze-contribution
|
||||
source: my-bigquery-source
|
||||
description: Use this tool to run contribution analysis on a dataset in BigQuery.
|
||||
```
|
||||
|
||||
## Sample Prompt
|
||||
You can prepare a sample table following https://cloud.google.com/bigquery/docs/get-contribution-analysis-insights.
|
||||
And use the following sample prompts to call this tool:
|
||||
|
||||
- What drives the changes in sales in the table `bqml_tutorial.iowa_liquor_sales_sum_data`? Use the project id myproject.
|
||||
- Analyze the contribution for the `total_sales` metric in the table `bqml_tutorial.iowa_liquor_sales_sum_data`. The test group is identified by the `is_test` column. The dimensions are `store_name`, `city`, `vendor_name`, `category_name` and `item_description`.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery-analyze-contribution". |
|
||||
| source | string | true | Name of the source the tool should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -15,19 +15,10 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-list-table-ids` accepts the following parameters:
|
||||
- **`dataset`** (required): Specifies the dataset from which to list table IDs.
|
||||
- **`project`** (optional): Defines the Google Cloud project ID. If not provided,
|
||||
the tool defaults to the project from the source configuration.
|
||||
|
||||
The tool's behavior regarding these parameters is influenced by the
|
||||
`allowedDatasets` restriction on the `bigquery` source:
|
||||
- **Without `allowedDatasets` restriction:** The tool can list tables from any
|
||||
dataset specified by the `dataset` and `project` parameters.
|
||||
- **With `allowedDatasets` restriction:** Before listing tables, the tool verifies
|
||||
that the requested dataset is in the allowed list. If it is not, the request is
|
||||
denied. If only one dataset is specified in the `allowedDatasets` list, it
|
||||
will be used as the default value for the `dataset` parameter.
|
||||
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
|
||||
from which to list table IDs. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
---
|
||||
title: "clickhouse-list-databases"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
A "clickhouse-list-databases" tool lists all databases in a ClickHouse instance.
|
||||
aliases:
|
||||
- /resources/tools/clickhouse-list-databases
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `clickhouse-list-databases` tool lists all available databases in a
|
||||
ClickHouse instance. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
|
||||
|
||||
This tool executes the `SHOW DATABASES` command and returns a list of all
|
||||
databases accessible to the configured user, making it useful for database
|
||||
discovery and exploration tasks.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_clickhouse_databases:
|
||||
kind: clickhouse-list-databases
|
||||
source: my-clickhouse-instance
|
||||
description: List all available databases in the ClickHouse instance
|
||||
```
|
||||
|
||||
## Return Value
|
||||
|
||||
The tool returns an array of objects, where each object contains:
|
||||
- `name`: The name of the database
|
||||
|
||||
Example response:
|
||||
```json
|
||||
[
|
||||
{"name": "default"},
|
||||
{"name": "system"},
|
||||
{"name": "analytics"},
|
||||
{"name": "user_data"}
|
||||
]
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
|
||||
| kind | string | true | Must be "clickhouse-list-databases". |
|
||||
| source | string | true | Name of the ClickHouse source to list databases from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| authRequired | array of string | false | Authentication services required to use this tool. |
|
||||
| parameters | array of Parameter | false | Parameters for the tool (typically not used). |
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
title: "Cloud SQL"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Cloud SQL.
|
||||
---
|
||||
@@ -1,31 +0,0 @@
|
||||
---
|
||||
title: cloud-sql-create-users
|
||||
type: docs
|
||||
weight: 10
|
||||
description: >
|
||||
Create a new user in a Cloud SQL instance.
|
||||
---
|
||||
|
||||
The `cloud-sql-create-users` tool creates a new user in a specified Cloud SQL instance. It can create both built-in and IAM users.
|
||||
|
||||
{{< notice info >}}
|
||||
This tool uses a `source` of kind `cloud-sql-admin`.
|
||||
{{< /notice >}}
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
create-cloud-sql-user:
|
||||
kind: cloud-sql-create-users
|
||||
source: my-cloud-sql-admin-source
|
||||
description: "Creates a new user in a Cloud SQL instance. Both built-in and IAM users are supported. IAM users require an email account as the user name. IAM is the more secure and recommended way to manage users. The agent should always ask the user what type of user they want to create. For more information, see https://cloud.google.com/sql/docs/postgres/add-manage-iam-users"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ------------ | :-------: | :----------: | ------------------------------------------------ |
|
||||
| kind | string | true | Must be "cloud-sql-create-users". |
|
||||
| description | string | false | A description of the tool. |
|
||||
| source | string | true | The name of the `cloud-sql-admin` source to use. |
|
||||
@@ -1,31 +0,0 @@
|
||||
---
|
||||
title: "cloud-sql-get-instance"
|
||||
type: docs
|
||||
weight: 10
|
||||
description: >
|
||||
Get a Cloud SQL instance resource.
|
||||
---
|
||||
|
||||
The `cloud-sql-get-instance` tool retrieves a Cloud SQL instance resource using the Cloud SQL Admin API.
|
||||
|
||||
{{< notice info >}}
|
||||
This tool uses a `source` of kind `cloud-sql-admin`.
|
||||
{{< /notice >}}
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get-sql-instance:
|
||||
kind: cloud-sql-get-instance
|
||||
source: my-cloud-sql-admin-source
|
||||
description: "Gets a particular cloud sql instance."
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ----------- | :------: | :----------: | ------------------------------------------------ |
|
||||
| kind | string | true | Must be "cloud-sql-get-instance". |
|
||||
| source | string | true | The name of the `cloud-sql-admin` source to use. |
|
||||
| description | string | false | A description of the tool. |
|
||||
@@ -1,46 +0,0 @@
|
||||
---
|
||||
title: Cloud SQL List Instances
|
||||
type: docs
|
||||
weight: 1
|
||||
description: "List Cloud SQL instances in a project.\n"
|
||||
---
|
||||
|
||||
The `cloud-sql-list-instances` tool lists all Cloud SQL instances in a specified
|
||||
Google Cloud project.
|
||||
|
||||
{{< notice info >}}
|
||||
This tool uses the `cloud-sql-admin` source, which automatically handles authentication on behalf of the user.
|
||||
{{< /notice >}}
|
||||
|
||||
## Configuration
|
||||
|
||||
Here is an example of how to configure the `cloud-sql-list-instances` tool in your
|
||||
`tools.yaml` file:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-cloud-sql-admin-source:
|
||||
kind: cloud-sql-admin
|
||||
|
||||
tools:
|
||||
list_my_instances:
|
||||
kind: cloud-sql-list-instances
|
||||
source: my-cloud-sql-admin-source
|
||||
description: Use this tool to list all Cloud SQL instances in a project.
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
The `cloud-sql-list-instances` tool has one required parameter:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| --------- | :------: | :----------: | ---------------------------- |
|
||||
| project | string | true | The Google Cloud project ID. |
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ------------ | :-------: | :----------: | ----------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "cloud-sql-list-instances". |
|
||||
| description | string | false | Description of the tool that is passed to the agent. |
|
||||
| source | string | true | The name of the `cloud-sql-admin` source to use for this tool. |
|
||||
@@ -1,39 +0,0 @@
|
||||
---
|
||||
title: "cloud-sql-wait-for-operation"
|
||||
type: docs
|
||||
weight: 10
|
||||
description: >
|
||||
Wait for a long-running Cloud SQL operation to complete.
|
||||
---
|
||||
|
||||
The `cloud-sql-wait-for-operation` tool is a utility tool that waits for a
|
||||
long-running Cloud SQL operation to complete. It does this by polling the Cloud
|
||||
SQL Admin API operation status endpoint until the operation is finished, using
|
||||
exponential backoff.
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
cloudsql-operations-get:
|
||||
kind: cloud-sql-wait-for-operation
|
||||
source: my-cloud-sql-source
|
||||
description: "This will poll on operations API until the operation is done. For checking operation status we need projectId and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup."
|
||||
delay: 1s
|
||||
maxDelay: 4m
|
||||
multiplier: 2
|
||||
maxRetries: 10
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ----------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "cloud-sql-wait-for-operation". |
|
||||
| source | string | true | The name of a `cloud-sql-admin` source to use for authentication. |
|
||||
| description | string | false | A description of the tool. |
|
||||
| delay | duration | false | The initial delay between polling requests (e.g., `3s`). Defaults to 3 seconds. |
|
||||
| maxDelay | duration | false | The maximum delay between polling requests (e.g., `4m`). Defaults to 4 minutes. |
|
||||
| multiplier | float | false | The multiplier for the polling delay. The delay is multiplied by this value after each request. Defaults to 2.0. |
|
||||
| maxRetries | int | false | The maximum number of polling attempts before giving up. Defaults to 10. |
|
||||
@@ -1,412 +0,0 @@
|
||||
---
|
||||
title: "firestore-query"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Query a Firestore collection with parameterizable filters and Firestore native JSON value types
|
||||
aliases:
|
||||
- /resources/tools/firestore-query
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The `firestore-query` tool allows you to query Firestore collections with dynamic, parameterizable filters that support Firestore's native JSON value types. This tool is designed for querying single collection, which is the standard pattern in Firestore. The collection path itself can be parameterized, making it flexible for various use cases. This tool is particularly useful when you need to create reusable query templates with parameters that can be substituted at runtime.
|
||||
|
||||
**Developer Note**: This tool serves as the general querying foundation that developers can use to create custom tools with specific query patterns.
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Parameterizable Queries**: Use Go template syntax to create dynamic queries
|
||||
- **Dynamic Collection Paths**: The collection path can be parameterized for flexibility
|
||||
- **Native JSON Value Types**: Support for Firestore's typed values (stringValue, integerValue, doubleValue, etc.)
|
||||
- **Complex Filter Logic**: Support for AND/OR logical operators in filters
|
||||
- **Template Substitution**: Dynamic collection paths, filters, and ordering
|
||||
- **Query Analysis**: Optional query performance analysis with explain metrics (non-parameterizable)
|
||||
|
||||
## Configuration
|
||||
|
||||
### Basic Configuration
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
query_countries:
|
||||
kind: firestore-query
|
||||
source: my-firestore-source
|
||||
description: Query countries with dynamic filters
|
||||
collectionPath: "countries"
|
||||
filters: |
|
||||
{
|
||||
"field": "continent",
|
||||
"op": "==",
|
||||
"value": {"stringValue": "{{.continent}}"}
|
||||
}
|
||||
parameters:
|
||||
- name: continent
|
||||
type: string
|
||||
description: Continent to filter by
|
||||
required: true
|
||||
```
|
||||
|
||||
### Advanced Configuration with Complex Filters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
advanced_query:
|
||||
kind: firestore-query
|
||||
source: my-firestore-source
|
||||
description: Advanced query with complex filters
|
||||
collectionPath: "{{.collection}}"
|
||||
filters: |
|
||||
{
|
||||
"or": [
|
||||
{"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
|
||||
{
|
||||
"and": [
|
||||
{"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}},
|
||||
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}},
|
||||
{"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
select:
|
||||
- name
|
||||
- status
|
||||
- priority
|
||||
orderBy:
|
||||
field: "{{.sortField}}"
|
||||
direction: "{{.sortDirection}}"
|
||||
limit: 100
|
||||
analyzeQuery: true
|
||||
parameters:
|
||||
- name: collection
|
||||
type: string
|
||||
description: Collection to query
|
||||
required: true
|
||||
- name: status
|
||||
type: string
|
||||
description: Status to filter by
|
||||
required: true
|
||||
- name: priority
|
||||
type: string
|
||||
description: Minimum priority value
|
||||
required: true
|
||||
- name: maxArea
|
||||
type: float
|
||||
description: Maximum area value
|
||||
required: true
|
||||
- name: isActive
|
||||
type: boolean
|
||||
description: Filter by active status
|
||||
required: true
|
||||
- name: sortField
|
||||
type: string
|
||||
description: Field to sort by
|
||||
required: false
|
||||
default: "createdAt"
|
||||
- name: sortDirection
|
||||
type: string
|
||||
description: Sort direction (ASCENDING or DESCENDING)
|
||||
required: false
|
||||
default: "DESCENDING"
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
### Configuration Parameters
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
|-----------|------|----------|-------------|
|
||||
| `kind` | string | Yes | Must be `firestore-query` |
|
||||
| `source` | string | Yes | Name of the Firestore source to use |
|
||||
| `description` | string | Yes | Description of what this tool does |
|
||||
| `collectionPath` | string | Yes | Path to the collection to query (supports templates) |
|
||||
| `filters` | string | No | JSON string defining query filters (supports templates) |
|
||||
| `select` | array | No | Fields to select from documents(supports templates - string or array) |
|
||||
| `orderBy` | object | No | Ordering configuration with `field` and `direction`(supports templates for the value of field or direction) |
|
||||
| `limit` | integer | No | Maximum number of documents to return (default: 100) (supports templates) |
|
||||
| `analyzeQuery` | boolean | No | Whether to analyze query performance (default: false) |
|
||||
| `parameters` | array | Yes | Parameter definitions for template substitution |
|
||||
|
||||
### Runtime Parameters
|
||||
|
||||
Runtime parameters are defined in the `parameters` array and can be used in templates throughout the configuration.
|
||||
|
||||
## Filter Format
|
||||
|
||||
### Simple Filter
|
||||
```json
|
||||
{
|
||||
"field": "age",
|
||||
"op": ">",
|
||||
"value": {"integerValue": "25"}
|
||||
}
|
||||
```
|
||||
|
||||
### AND Filter
|
||||
```json
|
||||
{
|
||||
"and": [
|
||||
{"field": "status", "op": "==", "value": {"stringValue": "active"}},
|
||||
{"field": "age", "op": ">=", "value": {"integerValue": "18"}}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### OR Filter
|
||||
```json
|
||||
{
|
||||
"or": [
|
||||
{"field": "role", "op": "==", "value": {"stringValue": "admin"}},
|
||||
{"field": "role", "op": "==", "value": {"stringValue": "moderator"}}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Nested Filters
|
||||
```json
|
||||
{
|
||||
"or": [
|
||||
{"field": "type", "op": "==", "value": {"stringValue": "premium"}},
|
||||
{
|
||||
"and": [
|
||||
{"field": "type", "op": "==", "value": {"stringValue": "standard"}},
|
||||
{"field": "credits", "op": ">", "value": {"integerValue": "1000"}}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Firestore Native Value Types
|
||||
|
||||
The tool supports all Firestore native JSON value types:
|
||||
|
||||
| Type | Format | Example |
|
||||
|------|--------|---------|
|
||||
| String | `{"stringValue": "text"}` | `{"stringValue": "{{.name}}"}` |
|
||||
| Integer | `{"integerValue": "123"}` or `{"integerValue": 123}` | `{"integerValue": "{{.age}}"}` or `{"integerValue": {{.age}}}` |
|
||||
| Double | `{"doubleValue": 45.67}` | `{"doubleValue": {{.price}}}` |
|
||||
| Boolean | `{"booleanValue": true}` | `{"booleanValue": {{.active}}}` |
|
||||
| Null | `{"nullValue": null}` | `{"nullValue": null}` |
|
||||
| Timestamp | `{"timestampValue": "RFC3339"}` | `{"timestampValue": "{{.date}}"}` |
|
||||
| GeoPoint | `{"geoPointValue": {"latitude": 0, "longitude": 0}}` | See below |
|
||||
| Array | `{"arrayValue": {"values": [...]}}` | See below |
|
||||
| Map | `{"mapValue": {"fields": {...}}}` | See below |
|
||||
|
||||
### Complex Type Examples
|
||||
|
||||
**GeoPoint:**
|
||||
```json
|
||||
{
|
||||
"field": "location",
|
||||
"op": "==",
|
||||
"value": {
|
||||
"geoPointValue": {
|
||||
"latitude": 37.7749,
|
||||
"longitude": -122.4194
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Array:**
|
||||
```json
|
||||
{
|
||||
"field": "tags",
|
||||
"op": "array-contains",
|
||||
"value": {"stringValue": "{{.tag}}"}
|
||||
}
|
||||
```
|
||||
|
||||
## Supported Operators
|
||||
|
||||
- `<` - Less than
|
||||
- `<=` - Less than or equal
|
||||
- `>` - Greater than
|
||||
- `>=` - Greater than or equal
|
||||
- `==` - Equal
|
||||
- `!=` - Not equal
|
||||
- `array-contains` - Array contains value
|
||||
- `array-contains-any` - Array contains any of the values
|
||||
- `in` - Value is in array
|
||||
- `not-in` - Value is not in array
|
||||
|
||||
## Examples
|
||||
|
||||
### Example 1: Query with Dynamic Collection Path
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
user_documents:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Query user-specific documents
|
||||
collectionPath: "users/{{.userId}}/documents"
|
||||
filters: |
|
||||
{
|
||||
"field": "type",
|
||||
"op": "==",
|
||||
"value": {"stringValue": "{{.docType}}"}
|
||||
}
|
||||
parameters:
|
||||
- name: userId
|
||||
type: string
|
||||
description: User ID
|
||||
required: true
|
||||
- name: docType
|
||||
type: string
|
||||
description: Document type to filter
|
||||
required: true
|
||||
```
|
||||
|
||||
### Example 2: Complex Geographic Query
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
location_search:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Search locations by area and population
|
||||
collectionPath: "cities"
|
||||
filters: |
|
||||
{
|
||||
"and": [
|
||||
{"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}},
|
||||
{"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}},
|
||||
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}}
|
||||
]
|
||||
}
|
||||
orderBy:
|
||||
field: "population"
|
||||
direction: "DESCENDING"
|
||||
limit: 50
|
||||
parameters:
|
||||
- name: country
|
||||
type: string
|
||||
description: Country code
|
||||
required: true
|
||||
- name: minPopulation
|
||||
type: string
|
||||
description: Minimum population (as string for large numbers)
|
||||
required: true
|
||||
- name: maxArea
|
||||
type: float
|
||||
description: Maximum area in square kilometers
|
||||
required: true
|
||||
```
|
||||
|
||||
### Example 3: Time-based Query with Analysis
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
activity_log:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Query activity logs within time range
|
||||
collectionPath: "logs"
|
||||
filters: |
|
||||
{
|
||||
"and": [
|
||||
{"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}},
|
||||
{"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}},
|
||||
{"field": "severity", "op": "in", "value": {"arrayValue": {"values": [
|
||||
{"stringValue": "ERROR"},
|
||||
{"stringValue": "CRITICAL"}
|
||||
]}}}
|
||||
]
|
||||
}
|
||||
select:
|
||||
- timestamp
|
||||
- message
|
||||
- severity
|
||||
- userId
|
||||
orderBy:
|
||||
field: "timestamp"
|
||||
direction: "DESCENDING"
|
||||
analyzeQuery: true
|
||||
parameters:
|
||||
- name: startTime
|
||||
type: string
|
||||
description: Start time in RFC3339 format
|
||||
required: true
|
||||
- name: endTime
|
||||
type: string
|
||||
description: End time in RFC3339 format
|
||||
required: true
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Invoking the Tool
|
||||
|
||||
```bash
|
||||
# Using curl
|
||||
curl -X POST http://localhost:5000/api/tool/your-tool-name/invoke \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"continent": "Europe",
|
||||
"minPopulation": "1000000",
|
||||
"maxArea": 500000.5,
|
||||
"isActive": true
|
||||
}'
|
||||
```
|
||||
|
||||
### Response Format
|
||||
|
||||
**Without analyzeQuery:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": "doc1",
|
||||
"path": "countries/doc1",
|
||||
"data": {
|
||||
"name": "France",
|
||||
"continent": "Europe",
|
||||
"population": 67000000,
|
||||
"area": 551695
|
||||
},
|
||||
"createTime": "2024-01-01T00:00:00Z",
|
||||
"updateTime": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
**With analyzeQuery:**
|
||||
```json
|
||||
{
|
||||
"documents": [...],
|
||||
"explainMetrics": {
|
||||
"planSummary": {
|
||||
"indexesUsed": [...]
|
||||
},
|
||||
"executionStats": {
|
||||
"resultsReturned": 10,
|
||||
"executionDuration": "15ms",
|
||||
"readOperations": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use Typed Values**: Always use Firestore's native JSON value types for proper type handling
|
||||
2. **String Numbers for Large Integers**: Use string representation for large integers to avoid precision loss
|
||||
3. **Template Security**: Validate all template parameters to prevent injection attacks
|
||||
4. **Index Optimization**: Use `analyzeQuery` to identify missing indexes
|
||||
5. **Limit Results**: Always set a reasonable `limit` to prevent excessive data retrieval
|
||||
6. **Field Selection**: Use `select` to retrieve only necessary fields
|
||||
|
||||
## Technical Notes
|
||||
|
||||
- Queries operate on a single collection (the standard Firestore pattern)
|
||||
- Maximum of 100 filters per query (configurable)
|
||||
- Template parameters must be properly escaped in JSON contexts
|
||||
- Complex nested queries may require composite indexes
|
||||
|
||||
## See Also
|
||||
|
||||
- [firestore-query-collection](firestore-query-collection.md) - Non-parameterizable query tool
|
||||
- [Firestore Source Configuration](../../sources/firestore.md)
|
||||
- [Firestore Query Documentation](https://firebase.google.com/docs/firestore/query-data/queries)
|
||||
@@ -33,12 +33,6 @@ tools:
|
||||
|
||||
It takes two parameters, the model_name looked up from get_models and the
|
||||
explore_name looked up from get_explores.
|
||||
|
||||
If this returns a suggestions field for a dimension, the contents of suggestions
|
||||
can be used as filters for this field. If this returns a suggest_explore and
|
||||
suggest_dimension, a query against that explore and dimension can be used to find
|
||||
valid filters for this field.
|
||||
|
||||
```
|
||||
|
||||
The response is a json array with the following elements:
|
||||
@@ -51,10 +45,7 @@ The response is a json array with the following elements:
|
||||
"label": "field label",
|
||||
"label_short": "field short label",
|
||||
"tags": ["tags", ...],
|
||||
"synonyms": ["synonyms", ...],
|
||||
"suggestions": ["suggestion", ...],
|
||||
"suggest_explore": "explore",
|
||||
"suggest_dimension": "dimension"
|
||||
"synonyms": ["synonyms", ...]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -21,16 +21,6 @@ It's compatible with the following sources:
|
||||
`looker-get-explores` accepts one parameter, the
|
||||
`model` id.
|
||||
|
||||
The return type is an array of maps, each map is formatted like:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "explore name",
|
||||
"description": "explore description",
|
||||
"label": "explore label",
|
||||
"group_label": "group label"
|
||||
}
|
||||
```
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
|
||||
@@ -45,10 +45,7 @@ The response is a json array with the following elements:
|
||||
"label": "field label",
|
||||
"label_short": "field short label",
|
||||
"tags": ["tags", ...],
|
||||
"synonyms": ["synonyms", ...],
|
||||
"suggestions": ["suggestion", ...],
|
||||
"suggest_explore": "explore",
|
||||
"suggest_dimension": "dimension"
|
||||
"synonyms": ["synonyms", ...]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -33,12 +33,6 @@ tools:
|
||||
|
||||
It takes two parameters, the model_name looked up from get_models and the
|
||||
explore_name looked up from get_explores.
|
||||
|
||||
If this returns a suggestions field for a measure, the contents of suggestions
|
||||
can be used as filters for this field. If this returns a suggest_explore and
|
||||
suggest_dimension, a query against that explore and dimension can be used to find
|
||||
valid filters for this field.
|
||||
|
||||
```
|
||||
|
||||
The response is a json array with the following elements:
|
||||
@@ -51,10 +45,7 @@ The response is a json array with the following elements:
|
||||
"label": "field label",
|
||||
"label_short": "field short label",
|
||||
"tags": ["tags", ...],
|
||||
"synonyms": ["synonyms", ...],
|
||||
"suggestions": ["suggestion", ...],
|
||||
"suggest_explore": "explore",
|
||||
"suggest_dimension": "dimension"
|
||||
"synonyms": ["synonyms", ...]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -45,10 +45,7 @@ The response is a json array with the following elements:
|
||||
"label": "field label",
|
||||
"label_short": "field short label",
|
||||
"tags": ["tags", ...],
|
||||
"synonyms": ["synonyms", ...],
|
||||
"suggestions": ["suggestion", ...],
|
||||
"suggest_explore": "explore",
|
||||
"suggest_dimension": "dimension"
|
||||
"synonyms": ["synonyms", ...]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -29,10 +29,6 @@ It's compatible with the following sources:
|
||||
7. an optional `limit`
|
||||
8. an optional `tz`
|
||||
|
||||
Starting in Looker v25.18, these queries can be identified in Looker's
|
||||
System Activity. In the History explore, use the field API Client Name
|
||||
to find MCP Toolbox queries.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
|
||||
@@ -29,10 +29,6 @@ It's compatible with the following sources:
|
||||
7. an optional `limit`
|
||||
8. an optional `tz`
|
||||
|
||||
Starting in Looker v25.18, these queries can be identified in Looker's
|
||||
System Activity. In the History explore, use the field API Client Name
|
||||
to find MCP Toolbox queries.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
---
|
||||
title: "mysql-list-tables"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "mysql-list-tables" tool lists schema information for all or specified tables in a MySQL database.
|
||||
aliases:
|
||||
- /resources/tools/mysql-list-tables
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mysql-list-tables` tool retrieves schema information for all or specified tables in a MySQL database. It is compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mysql](../../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../../sources/mysql.md)
|
||||
|
||||
`mysql-list-tables` lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, it lists all tables in user schemas. The output format can be set to `simple` which will return only the table names or `detailed` which is the default.
|
||||
|
||||
The tool takes the following input parameters:
|
||||
|
||||
| Parameter | Type | Description | Required |
|
||||
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
|
||||
| `table_names` | string | Filters by a comma-separated list of names. By default, it lists all tables in user schemas. Default: `""` | No |
|
||||
| `output_format` | string | Indicate the output format of table schema. `simple` will return only the table names, `detailed` will return the full table information. Default: `detailed`. | No |
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
mysql_list_tables:
|
||||
kind: mysql-list-tables
|
||||
source: mysql-source
|
||||
description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mysql-list-tables". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,39 +0,0 @@
|
||||
---
|
||||
title: "postgres-list-tables"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "postgres-list-tables" tool lists schema information for all or specified tables in a Postgres database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-list-tables
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `postgres-list-tables` tool retrieves schema information for all or specified tables in a Postgres database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-list-tables` lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). The tool takes the following input parameters:
|
||||
* `table_names` (optional): Filters by a comma-separated list of names. By default, it lists all tables in user schemas.
|
||||
* `output_format` (optional): Indicate the output format of table schema. `simple` will return only the table names, `detailed` will return the full table information. Default: `detailed`.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
postgres_list_tables:
|
||||
kind: postgres-list-tables
|
||||
source: postgres-source
|
||||
description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-list-tables". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the agent. |
|
||||
@@ -1,208 +0,0 @@
|
||||
---
|
||||
title: "spanner-list-tables"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
A "spanner-list-tables" tool retrieves schema information about tables in a
|
||||
Google Cloud Spanner database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `spanner-list-tables` tool retrieves comprehensive schema information about
|
||||
tables in a Cloud Spanner database. It automatically adapts to the database
|
||||
dialect (GoogleSQL or PostgreSQL) and returns detailed metadata including
|
||||
columns, constraints, and indexes. It's compatible with:
|
||||
|
||||
- [spanner](../../sources/spanner.md)
|
||||
|
||||
This tool is read-only and executes pre-defined SQL queries against the
|
||||
`INFORMATION_SCHEMA` tables to gather metadata. The tool automatically detects
|
||||
the database dialect from the source configuration and uses the appropriate SQL
|
||||
syntax.
|
||||
|
||||
## Features
|
||||
|
||||
- **Automatic Dialect Detection**: Adapts queries based on whether the database
|
||||
uses GoogleSQL or PostgreSQL dialect
|
||||
- **Comprehensive Schema Information**: Returns columns, data types, constraints,
|
||||
indexes, and table relationships
|
||||
- **Flexible Filtering**: Can list all tables or filter by specific table names
|
||||
- **Output Format Options**: Choose between simple (table names only) or detailed
|
||||
(full schema information) output
|
||||
|
||||
## Example
|
||||
|
||||
### Basic Usage - List All Tables
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-spanner-db:
|
||||
kind: spanner
|
||||
project: ${SPANNER_PROJECT}
|
||||
instance: ${SPANNER_INSTANCE}
|
||||
database: ${SPANNER_DATABASE}
|
||||
dialect: googlesql # or postgresql
|
||||
|
||||
tools:
|
||||
list_all_tables:
|
||||
kind: spanner-list-tables
|
||||
source: my-spanner-db
|
||||
description: Lists all tables with their complete schema information
|
||||
```
|
||||
|
||||
### List Specific Tables
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_specific_tables:
|
||||
kind: spanner-list-tables
|
||||
source: my-spanner-db
|
||||
description: |
|
||||
Lists schema information for specific tables.
|
||||
Example usage:
|
||||
{
|
||||
"table_names": "users,orders,products",
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts two optional parameters:
|
||||
|
||||
| **parameter** | **type** | **default** | **description** |
|
||||
|-----------------|:--------:|:-----------:|------------------------------------------------------------------------------------------------------|
|
||||
| table_names | string | "" | Comma-separated list of table names to filter. If empty, lists all tables in user-accessible schemas |
|
||||
| output_format | string | "detailed" | Output format: "simple" returns only table names, "detailed" returns full schema information |
|
||||
|
||||
## Output Format
|
||||
|
||||
### Simple Format
|
||||
|
||||
When `output_format` is set to "simple", the tool returns a minimal JSON structure:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"schema_name": "public",
|
||||
"object_name": "users",
|
||||
"object_details": "{\"name\":\"users\"}"
|
||||
},
|
||||
{
|
||||
"schema_name": "public",
|
||||
"object_name": "orders",
|
||||
"object_details": "{\"name\":\"orders\"}"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Detailed Format
|
||||
|
||||
When `output_format` is set to "detailed" (default), the tool returns comprehensive schema information:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"schema_name": "public",
|
||||
"object_name": "users",
|
||||
"object_details": "{
|
||||
\"schema_name\": \"public\",
|
||||
\"object_name\": \"users\",
|
||||
\"object_type\": \"BASE TABLE\",
|
||||
\"columns\": [
|
||||
{
|
||||
\"column_name\": \"id\",
|
||||
\"data_type\": \"INT64\",
|
||||
\"ordinal_position\": 1,
|
||||
\"is_not_nullable\": true,
|
||||
\"column_default\": null
|
||||
},
|
||||
{
|
||||
\"column_name\": \"email\",
|
||||
\"data_type\": \"STRING(255)\",
|
||||
\"ordinal_position\": 2,
|
||||
\"is_not_nullable\": true,
|
||||
\"column_default\": null
|
||||
}
|
||||
],
|
||||
\"constraints\": [
|
||||
{
|
||||
\"constraint_name\": \"PK_users\",
|
||||
\"constraint_type\": \"PRIMARY KEY\",
|
||||
\"constraint_definition\": \"PRIMARY KEY (id)\",
|
||||
\"constraint_columns\": [\"id\"],
|
||||
\"foreign_key_referenced_table\": null,
|
||||
\"foreign_key_referenced_columns\": []
|
||||
}
|
||||
],
|
||||
\"indexes\": [
|
||||
{
|
||||
\"index_name\": \"idx_users_email\",
|
||||
\"index_type\": \"INDEX\",
|
||||
\"is_unique\": true,
|
||||
\"is_null_filtered\": false,
|
||||
\"interleaved_in_table\": null,
|
||||
\"index_key_columns\": [
|
||||
{\"column_name\": \"email\", \"ordering\": \"ASC\"}
|
||||
],
|
||||
\"storing_columns\": []
|
||||
}
|
||||
]
|
||||
}"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Use Cases
|
||||
|
||||
1. **Database Documentation**: Generate comprehensive documentation of your
|
||||
database schema
|
||||
2. **Schema Validation**: Verify that expected tables and columns exist
|
||||
3. **Migration Planning**: Understand the current schema before making changes
|
||||
4. **Development Tools**: Build tools that need to understand database structure
|
||||
5. **Audit and Compliance**: Track schema changes and ensure compliance with
|
||||
data governance policies
|
||||
|
||||
## Example with Agent Integration
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
spanner-db:
|
||||
kind: spanner
|
||||
project: my-project
|
||||
instance: my-instance
|
||||
database: my-database
|
||||
dialect: googlesql
|
||||
|
||||
tools:
|
||||
schema_inspector:
|
||||
kind: spanner-list-tables
|
||||
source: spanner-db
|
||||
description: |
|
||||
Use this tool to inspect database schema information.
|
||||
You can:
|
||||
- List all tables by leaving table_names empty
|
||||
- Get specific table schemas by providing comma-separated table names
|
||||
- Choose between simple (names only) or detailed (full schema) output
|
||||
|
||||
Examples:
|
||||
1. List all tables with details: {"output_format": "detailed"}
|
||||
2. Get specific tables: {"table_names": "users,orders", "output_format": "detailed"}
|
||||
3. Just get table names: {"output_format": "simple"}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|---------------|:--------:|:------------:|--------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "spanner-list-tables" |
|
||||
| source | string | true | Name of the Spanner source to query |
|
||||
| description | string | false | Description of the tool that is passed to the LLM |
|
||||
| authRequired | string[] | false | List of auth services required to invoke this tool |
|
||||
|
||||
## Notes
|
||||
|
||||
- This tool is read-only and does not modify any data
|
||||
- The tool automatically handles both GoogleSQL and PostgreSQL dialects
|
||||
- Large databases with many tables may take longer to query
|
||||
@@ -1,39 +0,0 @@
|
||||
---
|
||||
title: "sqlite-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "sqlite-execute-sql" tool executes a single SQL statement against a SQLite database.
|
||||
aliases:
|
||||
- /resources/tools/sqlite-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `sqlite-execute-sql` tool executes a single SQL statement against a SQLite database.
|
||||
It's compatible with any of the following sources:
|
||||
|
||||
- [sqlite](../../sources/sqlite.md)
|
||||
|
||||
This tool is designed for direct execution of SQL statements. It takes a single `sql` input parameter and runs the SQL statement against the configured SQLite `source`.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with
|
||||
> human-in-the-loop and shouldn't be used for production agents.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
execute_sql_tool:
|
||||
kind: sqlite-execute-sql
|
||||
source: my-sqlite-db
|
||||
description: Use this tool to execute a SQL statement.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|----------------------------------------------------|
|
||||
| kind | string | true | Must be "sqlite-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -1,101 +0,0 @@
|
||||
---
|
||||
title: "yugabytedb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "yugabytedb-sql" tool executes a pre-defined SQL statement against a YugabyteDB
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `yugabytedb-sql` tool executes a pre-defined SQL statement against a YugabyteDB
|
||||
database.
|
||||
|
||||
The specified SQL statement is executed as a prepared statement,
|
||||
and specified parameters will inserted according to their position: e.g. `1`
|
||||
will be the first parameter specified, `$@` will be the second parameter, and so
|
||||
on. If template parameters are included, they will be resolved before execution
|
||||
of the prepared statement.
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: yugabytedb-sql
|
||||
source: my-yb-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = $1
|
||||
AND flight_number = $2
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
A airline code is a code for an airline service consisting of two-character
|
||||
airline designator and followed by flight number, which is 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option choose the date closes to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement, including identifiers, column names,
|
||||
> and table names. **This makes it more vulnerable to SQL injections**. Using basic parameters
|
||||
> only (see above) is recommended for performance and safety reasons. For more details, please
|
||||
> check [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: yugabytedb-sql
|
||||
source: my-yb-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}}
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|---------------------|:---------------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "yugabytedb-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
@@ -79,7 +79,7 @@
|
||||
"# @markdown Please fill in the value below with your GCP project ID and then run the cell.\n",
|
||||
"\n",
|
||||
"# Please fill in these values.\n",
|
||||
"project_id = \"\" # @param {type:\"string\"}\n",
|
||||
"project_id = \"project-id\" # @param {type:\"string\"}\n",
|
||||
"\n",
|
||||
"# Quick input validations.\n",
|
||||
"assert project_id, \"⚠️ Please provide a Google Cloud project ID\"\n",
|
||||
@@ -767,7 +767,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.14.0\" # x-release-please-version\n",
|
||||
"version = \"0.11.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -114,7 +114,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.14.0"
|
||||
export VERSION="0.11.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.14.0\" # x-release-please-version\n",
|
||||
"version = \"0.13.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -1,140 +0,0 @@
|
||||
---
|
||||
title: "Gemini-CLI and OAuth"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to connect to Looker from Gemini-CLI with end-user credentials
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
Gemini-CLI can be configured to get an OAuth token from Looker, then send this
|
||||
token to MCP Toolbox as part of the request. MCP Toolbox can then use this token
|
||||
to authentincate with Looker. This means that there is no need to get a Looker
|
||||
Client ID and Client Secret. This also means that MCP Toolbox can be set up as a
|
||||
shared resource.
|
||||
|
||||
This configuration requires Toolbox v0.14.0 or later.
|
||||
|
||||
## Step 1: Register the OAuth App in Looker
|
||||
|
||||
You first need to register the OAuth application. Refer to the documentation
|
||||
[here](https://cloud.google.com/looker/docs/api-cors#registering_an_oauth_client_application).
|
||||
You may need to ask an administrator to do this for you.
|
||||
|
||||
1. Go to the API Explorer application, locate "Register OAuth App", and press
|
||||
the "Run It" button.
|
||||
1. Set the `client_guid` to "gemini-cli".
|
||||
1. Set the `redirect_uri` to "http://localhost:7777/oauth/callback".
|
||||
1. The `display_name` and `description` can be "Gemini-CLI" or anything
|
||||
meaningful.
|
||||
1. Set `enabled` to "true".
|
||||
1. Check the box confirming that you understand this API will change data.
|
||||
1. Click the "Run" button.
|
||||
|
||||

|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
In this section, we will download Toolbox and run the Toolbox server.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Create a file `looker_env` with the settings for your
|
||||
Looker instance.
|
||||
|
||||
```bash
|
||||
export LOOKER_BASE_URL=https://looker.example.com
|
||||
export LOOKER_VERIFY_SSL=true
|
||||
```
|
||||
|
||||
In some instances you may need to append `:19999` to
|
||||
the LOOKER_BASE_URL.
|
||||
|
||||
1. Load the looker_env file into your environment.
|
||||
|
||||
```bash
|
||||
source looker_env
|
||||
```
|
||||
|
||||
1. Run the Toolbox server using the prebuilt Looker tools.
|
||||
|
||||
```bash
|
||||
./toolbox --prebuilt looker
|
||||
```
|
||||
|
||||
The toolbox server will begin listening on localhost port 5000. Leave it
|
||||
running and continue in another terminal.
|
||||
|
||||
Later, when it is time to shut everything down, you can quit the toolbox
|
||||
server with Ctrl-C in this terminal window.
|
||||
|
||||
## Step 3: Configure Gemini-CLI
|
||||
|
||||
1. Edit the file `~/.gemini/settings.json`. Add the following, substituting your
|
||||
Looker server host name for `looker.example.com`.
|
||||
|
||||
```json
|
||||
"mcpServers": {
|
||||
"looker": {
|
||||
"httpUrl": "http://localhost:5000/mcp",
|
||||
"oauth": {
|
||||
"enabled": true,
|
||||
"clientId": "gemini-cli",
|
||||
"authorizationUrl": "https://looker.example.com/auth",
|
||||
"tokenUrl": "https://looker.example.com/api/token",
|
||||
"scopes": ["cors_api"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `authorizationUrl` should point to the URL you use to access Looker via the
|
||||
web UI. The `tokenUrl` should point to the URL you use to access Looker via
|
||||
the API. In some cases you will need to use the port number `:19999` after
|
||||
the host name but before the `/api/token` part.
|
||||
|
||||
1. Start Gemini-CLI.
|
||||
|
||||
1. Authenticate with the command `/mcp auth looker`. Gemini-CLI will open up a
|
||||
browser where you will confirm that you want to access Looker with your
|
||||
account.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
1. Use Gemini-CLI with your tools.
|
||||
|
||||
## Using Toolbox as a Shared Service
|
||||
|
||||
Toolbox can be run on another server as a shared service accessed by multiple
|
||||
users. We strongly recommend running toolbox behind a web proxy such as `nginx`
|
||||
which will provide SSL encryption. Google Cloud Run is another good way to run
|
||||
toolbox. You will connect to a service like `https://toolbox.example.com/mcp`.
|
||||
The proxy server will handle the SSL encryption and certificates. Then it will
|
||||
foward the requests to `http://localhost:5000/mcp` running in that environment.
|
||||
The details of the config are beyond the scope of this document, but will be
|
||||
familiar to your system administrators.
|
||||
|
||||
To use the shared service, just change the `localhost:5000` in the `httpUrl` in
|
||||
`~/.gemini/settings.json` to the host name and possibly the port of the shared
|
||||
service.
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 26 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 75 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 81 KiB |
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user