Compare commits: sig...invoke-too (4 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | decff260fb |  |
|  | dd61496420 |  |
|  | 9cd494b18a |  |
|  | 005a1e624c |  |
@@ -20,9 +20,9 @@ DESCRIPTIONS=(
)

# Write the table header
ROW_FMT="| %-105s | %-120s | %-67s | %-108s |\n"
output_string+=$(printf "$ROW_FMT" "**OS/Architecture**" "**Description**" "**SHA256 Hash**" "**Signature**")$'\n'
output_string+=$(printf "$ROW_FMT" "$(printf -- '-%0.s' {1..105})" "$(printf -- '-%0.s' {1..120})" "$(printf -- '-%0.s' {1..67})" "$(printf -- '-%0.s' {1..67})")$'\n'
ROW_FMT="| %-105s | %-120s | %-67s |\n"
output_string+=$(printf "$ROW_FMT" "**OS/Architecture**" "**Description**" "**SHA256 Hash**")$'\n'
output_string+=$(printf "$ROW_FMT" "$(printf -- '-%0.s' {1..105})" "$(printf -- '-%0.s' {1..120})" "$(printf -- '-%0.s' {1..67})")$'\n'

# Loop through all files matching the pattern "toolbox.*.*"

@@ -43,19 +43,16 @@ do
URL="https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox"
fi

# Generate the signature URL & link
SIG_URL="${URL}.sig"
SIG_LINK="[.sig]($SIG_URL)"

curl "$URL" --fail --output toolbox || exit 1

# Calculate the SHA256 checksum of the file
SHA256=$(shasum -a 256 toolbox | awk '{print $1}')

# Write the table row
output_string+=$(printf "$ROW_FMT" "[$OS/$ARCH]($URL)" "$description_text" "$SHA256" "$SIG_LINK")$'\n'
output_string+=$(printf "$ROW_FMT" "[$OS/$ARCH]($URL)" "$description_text" "$SHA256")$'\n'

rm toolbox
done

printf "$output_string\n"
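The table this script generates can be checked by hand. A minimal sketch, assuming an already-released version and platform (the values below are examples, not taken from this change):

```bash
# Example only: download one released binary, recompute its checksum, and
# compare the first field against the SHA256 column of the generated table.
VERSION="v0.14.0"          # example release version
OS="linux" ARCH="amd64"    # example platform
curl "https://storage.googleapis.com/genai-toolbox/$VERSION/$OS/$ARCH/toolbox" --fail --output toolbox
shasum -a 256 toolbox
```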
@@ -62,27 +62,6 @@ steps:
postgressql \
postgresexecutesql

- id: "alloydb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "ALLOYDB_PROJECT=$PROJECT_ID"
- "ALLOYDB_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
- "ALLOYDB_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
- "ALLOYDB_REGION=$_REGION"
secretEnv: ["ALLOYDB_POSTGRES_USER"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB" \
alloydb \
alloydb

- id: "alloydb-pg"
name: golang:1

@@ -142,7 +121,8 @@ steps:
- "BIGTABLE_PROJECT=$PROJECT_ID"
- "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
secretEnv:
["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"

@@ -174,26 +154,6 @@ steps:
bigquery \
bigquery

- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "DATAPLEX_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Dataplex" \
dataplex \
dataplex

- id: "postgres"
name: golang:1
waitFor: ["compile-test-binary"]

@@ -294,7 +254,8 @@ steps:
- "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_MYSQL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
secretEnv:
["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"

@@ -350,23 +311,23 @@ steps:
mssql \
mssql

# - id: "dgraph"
# name: golang:1
# waitFor: ["compile-test-binary"]
# entrypoint: /bin/bash
# env:
# - "GOPATH=/gopath"
# - "DGRAPH_URL=$_DGRAPHURL"
# volumes:
# - name: "go"
# path: "/gopath"
# args:
# - -c
# - |
# .ci/test_with_coverage.sh \
# "Dgraph" \
# dgraph \
# dgraph
- id: "dgraph"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "DGRAPH_URL=$_DGRAPHURL"
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Dgraph" \
dgraph \
dgraph

- id: "http"
|
||||
name: golang:1
|
||||
@@ -406,7 +367,7 @@ steps:
|
||||
sqlite
|
||||
|
||||
- id: "couchbase"
|
||||
name: golang:1
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -414,8 +375,7 @@ steps:
|
||||
- "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
|
||||
- "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv:
|
||||
["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
|
||||
secretEnv: ["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
@@ -428,7 +388,7 @@ steps:
|
||||
couchbase
|
||||
|
||||
- id: "redis"
|
||||
name: golang:1
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -445,9 +405,9 @@ steps:
|
||||
"Redis" \
|
||||
redis \
|
||||
redis
|
||||
|
||||
|
||||
- id: "valkey"
|
||||
name: golang:1
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -465,219 +425,7 @@ steps:
|
||||
"Valkey" \
|
||||
valkey \
|
||||
valkey
|
||||
|
||||
- id: "oceanbase"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "OCEANBASE_PORT=$_OCEANBASE_PORT"
|
||||
- "OCEANBASE_DATABASE=$_OCEANBASE_DATABASE"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"OceanBase" \
|
||||
oceanbase \
|
||||
oceanbase
|
||||
|
||||
- id: "firestore"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "FIRESTORE_PROJECT=$PROJECT_ID"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Firestore" \
|
||||
firestore \
|
||||
firestore
|
||||
|
||||
- id: "looker"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "FIRESTORE_PROJECT=$PROJECT_ID"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
- "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
|
||||
secretEnv:
|
||||
[
|
||||
"CLIENT_ID",
|
||||
"LOOKER_BASE_URL",
|
||||
"LOOKER_CLIENT_ID",
|
||||
"LOOKER_CLIENT_SECRET",
|
||||
]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Looker" \
|
||||
looker \
|
||||
looker
|
||||
|
||||
- id: "alloydbwaitforoperation"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "API_KEY=$(gcloud auth print-access-token)"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Alloydb Wait for Operation" \
|
||||
utility \
|
||||
utility/alloydbwaitforoperation
|
||||
|
||||
- id: "cloud-sql"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL Wait for Operation" \
|
||||
cloudsql \
|
||||
cloudsql
|
||||
|
||||
- id: "tidb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "TIDB_DATABASE=$_DATABASE_NAME"
|
||||
- "TIDB_HOST=$_TIDB_HOST"
|
||||
- "TIDB_PORT=$_TIDB_PORT"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID", "TIDB_USER", "TIDB_PASS"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"TiDB" \
|
||||
tidb \
|
||||
tidbsql tidbexecutesql
|
||||
|
||||
- id: "firebird"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "FIREBIRD_DATABASE=$_FIREBIRD_DATABASE_NAME"
|
||||
- "FIREBIRD_HOST=$_FIREBIRD_HOST"
|
||||
- "FIREBIRD_PORT=$_FIREBIRD_PORT"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID", "FIREBIRD_USER", "FIREBIRD_PASS"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Firebird" \
|
||||
firebird \
|
||||
firebirdsql firebirdexecutesql
|
||||
|
||||
- id: "clickhouse"
|
||||
name : golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "CLICKHOUSE_DATABASE=$_CLICKHOUSE_DATABASE"
|
||||
- "CLICKHOUSE_PORT=$_CLICKHOUSE_PORT"
|
||||
- "CLICKHOUSE_PROTOCOL=$_CLICKHOUSE_PROTOCOL"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLICKHOUSE_HOST", "CLICKHOUSE_USER", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"ClickHouse" \
|
||||
clickhouse \
|
||||
clickhouse
|
||||
|
||||
- id: "trino"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "TRINO_HOST=$_TRINO_HOST"
|
||||
- "TRINO_PORT=$_TRINO_PORT"
|
||||
- "TRINO_CATALOG=$_TRINO_CATALOG"
|
||||
- "TRINO_SCHEMA=$_TRINO_SCHEMA"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID", "TRINO_USER"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Trino" \
|
||||
trino \
|
||||
trinosql trinoexecutesql
|
||||
|
||||
- id: "yugabytedb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
|
||||
- "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
|
||||
- "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
./yugabytedb.test -test.v
|
||||
|
||||
|
||||
availableSecrets:
secretManager:

@@ -731,38 +479,6 @@ availableSecrets:
env: REDIS_PASS
- versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
env: VALKEY_ADDRESS
- versionName: projects/$PROJECT_ID/secrets/looker_base_url/versions/latest
env: LOOKER_BASE_URL
- versionName: projects/$PROJECT_ID/secrets/looker_client_id/versions/latest
env: LOOKER_CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/looker_client_secret/versions/latest
env: LOOKER_CLIENT_SECRET
- versionName: projects/$PROJECT_ID/secrets/tidb_user/versions/latest
env: TIDB_USER
- versionName: projects/$PROJECT_ID/secrets/tidb_pass/versions/latest
env: TIDB_PASS
- versionName: projects/$PROJECT_ID/secrets/clickhouse_host/versions/latest
env: CLICKHOUSE_HOST
- versionName: projects/$PROJECT_ID/secrets/clickhouse_user/versions/latest
env: CLICKHOUSE_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_user/versions/latest
env: FIREBIRD_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_pass/versions/latest
env: FIREBIRD_PASS
- versionName: projects/$PROJECT_ID/secrets/trino_user/versions/latest
env: TRINO_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_host/versions/latest
env: OCEANBASE_HOST
- versionName: projects/$PROJECT_ID/secrets/oceanbase_user/versions/latest
env: OCEANBASE_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
env: OCEANBASE_PASSWORD
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
env: YUGABYTEDB_HOST
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
env: YUGABYTEDB_USER
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
env: YUGABYTEDB_PASS

options:

@@ -775,7 +491,6 @@ options:

substitutions:
_DATABASE_NAME: test_database
_FIREBIRD_DATABASE_NAME: /firebird/test_database.fdb
_REGION: "us-central1"
_CLOUD_SQL_POSTGRES_INSTANCE: "cloud-sql-pg-testing"
_ALLOYDB_POSTGRES_CLUSTER: "alloydb-pg-testing"

@@ -795,21 +510,4 @@ substitutions:
_MSSQL_PORT: "1433"
_DGRAPHURL: "https://play.dgraph.io"
_COUCHBASE_BUCKET: "couchbase-bucket"
_COUCHBASE_SCOPE: "couchbase-scope"
_LOOKER_VERIFY_SSL: "true"
_TIDB_HOST: 127.0.0.1
_TIDB_PORT: "4000"
_CLICKHOUSE_DATABASE: "default"
_CLICKHOUSE_PORT: "8123"
_CLICKHOUSE_PROTOCOL: "http"
_FIREBIRD_HOST: 127.0.0.1
_FIREBIRD_PORT: "3050"
_TRINO_HOST: 127.0.0.1
_TRINO_PORT: "8080"
_TRINO_CATALOG: "memory"
_TRINO_SCHEMA: "default"
_OCEANBASE_PORT: "2883"
_OCEANBASE_DATABASE: "oceanbase"
_YUGABYTEDB_DATABASE: "yugabyte"
_YUGABYTEDB_PORT: "5433"
_YUGABYTEDB_LOADBALANCE: "false"
_COUCHBASE_SCOPE: "couchbase-scope"
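The `_*` keys above are Cloud Build substitutions, so the defaults can normally be overridden when a build is submitted by hand. A hypothetical invocation (the config path and the override values are placeholders, not taken from this change):

```bash
# Hypothetical manual run; config path and override values are placeholders.
gcloud builds submit --config=path/to/integration.cloudbuild.yaml \
  --substitutions=_TIDB_HOST=10.0.0.5,_TIDB_PORT=4000
```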
@@ -2,10 +2,8 @@

# Arguments:
# $1: Display name for logs (e.g., "Cloud SQL Postgres")
# $2: Integration test's package name (e.g., cloudsqlpg)
# $3, $4, ...: Tool package names for grep (e.g., postgressql), if the
# integration test specifically check a separate package inside a folder, please
# specify the full path instead (e.g., postgressql/postgresexecutesql)
# $2: Source package name (e.g., cloudsqlpg)
# $3, $4, ...: Tool package names for grep (e.g., postgressql)

DISPLAY_NAME="$1"
SOURCE_PACKAGE_NAME="$2"

@@ -59,4 +57,4 @@ if awk -v coverage="$coverage_numeric" 'BEGIN {exit !(coverage < 50)}'; then
exit 1
else
echo "Coverage for ${DISPLAY_NAME} is sufficient."
fi
fi
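For reference, the argument convention documented above matches how the script is invoked from the build steps in this change, e.g.:

```bash
# Display name for logs, then the source package, then one or more tool packages to grep for.
.ci/test_with_coverage.sh "AlloyDB" alloydb alloydb
.ci/test_with_coverage.sh "Firebird" firebird firebirdsql firebirdexecutesql
```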
@@ -17,7 +17,6 @@ steps:
waitFor: ['-']
script: |
#!/usr/bin/env bash
set -e
export VERSION=$(cat ./cmd/version.txt)
docker buildx create --name container-builder --driver docker-container --bootstrap --use

@@ -27,41 +26,6 @@ steps:
fi
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .

- id: "generate-token"
name: "gcr.io/cloud-builders/gcloud"
waitFor: ['-']
script: |
#!/usr/bin/env bash
set -e
gcloud auth print-identity-token --audiences=sigstore > /workspace/token

- id: "get-docker-digest"
name: "gcr.io/cloud-builders/gcloud"
waitFor:
- "build-docker"
script: |
#!/usr/bin/env bash
set -e
export VERSION=$(cat ./cmd/version.txt)
IMAGE_DIGEST=$(\
gcloud container images describe ${_DOCKER_URI}:$VERSION \
--format='get(image_summary.fully_qualified_digest)'\
)
echo $IMAGE_DIGEST > /workspace/image_digest

- id: "sign-docker"
name: "gcr.io/projectsigstore/cosign"
waitFor:
- "get-docker-digest"
- "generate-token"
env:
- 'SIGSTORE_NO_CACHE=true'
script: |
#!/busybox/sh
set -e
IMAGE_DIGEST=$(cat /workspace/image_digest)
cosign sign --identity-token=$(cat /workspace/token) $IMAGE_DIGEST -y

- id: "install-dependencies"
name: golang:1
waitFor: ['-']

@@ -88,31 +52,14 @@ steps:
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64

- id: "sign-linux-amd64"
name: "gcr.io/projectsigstore/cosign"
waitFor:
- "build-linux-amd64"
- "generate-token"
env:
- 'SIGSTORE_NO_CACHE=true'
script: |
#!/busybox/sh
set -e
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.linux.amd64.sig ./toolbox.linux.amd64 -y

- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "sign-linux-amd64"
- "build-linux-amd64"
script: |
#!/usr/bin/env bash
set -e
export VERSION=v$(cat ./cmd/version.txt)

gcloud storage cp toolbox.linux.amd64
gs://$_BUCKET_NAME/test/$VERSION/linux/amd64/toolbox

gcloud storage cp toolbox.linux.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/linux/amd64/toolbox.sig
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox

- id: "build-darwin-arm64"
|
||||
name: golang:1
|
||||
@@ -129,30 +76,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
|
||||
|
||||
- id: "sign-darwin-arm64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-darwin-arm64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.darwin.arm64.sig ./toolbox.darwin.arm64 -y
|
||||
|
||||
- id: "store-darwin-arm64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-darwin-arm64"
|
||||
- "build-darwin-arm64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/test/$VERSION/darwin/arm64/toolbox
|
||||
|
||||
gcloud storage cp toolbox.darwin.arm64.sig gs://$_BUCKET_NAME/test/$VERSION/darwin/arm64/toolbox.sig
|
||||
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
|
||||
|
||||
- id: "build-darwin-amd64"
|
||||
name: golang:1
|
||||
@@ -169,30 +100,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
|
||||
|
||||
- id: "sign-darwin-amd64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-darwin-amd64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.darwin.amd64.sig ./toolbox.darwin.amd64 -y
|
||||
|
||||
- id: "store-darwin-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-darwin-amd64"
|
||||
- "build-darwin-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/test/$VERSION/darwin/amd64/toolbox
|
||||
|
||||
gcloud storage cp toolbox.darwin.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/darwin/amd64/toolbox.sig
|
||||
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
|
||||
|
||||
- id: "build-windows-amd64"
|
||||
name: golang:1
|
||||
@@ -209,30 +124,14 @@ steps:
|
||||
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
|
||||
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
|
||||
|
||||
- id: "sign-windows-amd64"
|
||||
name: "gcr.io/projectsigstore/cosign"
|
||||
waitFor:
|
||||
- "build-windows-amd64"
|
||||
- "generate-token"
|
||||
env:
|
||||
- 'SIGSTORE_NO_CACHE=true'
|
||||
script: |
|
||||
#!/busybox/sh
|
||||
set -e
|
||||
cosign sign-blob --identity-token=$(cat /workspace/token) --bundle=toolbox.windows.amd64.sig ./toolbox.windows.amd64 -y
|
||||
|
||||
- id: "store-windows-amd64"
|
||||
name: "gcr.io/cloud-builders/gcloud:latest"
|
||||
waitFor:
|
||||
- "sign-windows-amd64"
|
||||
- "build-windows-amd64"
|
||||
script: |
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
export VERSION=v$(cat ./cmd/version.txt)
|
||||
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/test/$VERSION/windows/amd64/toolbox.exe
|
||||
|
||||
gcloud storage cp toolbox.windows.amd64.sig gs://$_BUCKET_NAME/test/$VERSION/windows/amd64/toolbox.exe.sig
|
||||
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
|
||||
|
||||
options:
|
||||
automapSubstitutions: true
|
||||
@@ -245,5 +144,5 @@ substitutions:
|
||||
_AR_HOSTNAME: ${_REGION}-docker.pkg.dev
|
||||
_AR_REPO_NAME: toolbox
|
||||
_BUCKET_NAME: genai-toolbox
|
||||
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/test
|
||||
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
|
||||
_PUSH_LATEST: "true"
|
||||
|
||||
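The `sign-*` steps above emit a sigstore bundle next to each binary (`toolbox.<os>.<arch>.sig`). A rough sketch of how such a blob could be checked with `cosign verify-blob`; the certificate identity and issuer are placeholders that depend on the identity actually used for keyless signing, not values taken from this change:

```bash
# Sketch only: verify a downloaded binary against its sigstore bundle.
# The identity and issuer below are placeholders and must match the signer.
cosign verify-blob ./toolbox.linux.amd64 \
  --bundle toolbox.linux.amd64.sig \
  --certificate-identity "releaser@example-project.iam.gserviceaccount.com" \
  --certificate-oidc-issuer "https://accounts.google.com"
```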
.github/PULL_REQUEST_TEMPLATE.md (24 lines changed)
@@ -1,24 +0,0 @@
## Description

---
> Should include a concise description of the changes (bug or feature), it's
> impact, along with a summary of the solution

## PR Checklist

---
> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:

- [ ] Make sure you reviewed
[CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
[bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
before writing your code! That way we can discuss the change, evaluate
designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involve a breaking change

🛠️ Fixes #<issue_number_goes_here>
.github/blunderbuss.yml (25 lines changed)
@@ -1,24 +1,7 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

assign_issues:
- kurtisvg
- Yuan325
- duwenxin99
- averikitsch
- anubhav756
- dishaprakash
- twishabansal
assign_issues_by:
- labels:
- 'product: bigquery'

@@ -26,11 +9,7 @@ assign_issues_by:
- Genesis929
- shobsi
- jiaxunwu
- labels:
- 'product: looker'
to:
- drstrangelooker
assign_prs:
- kurtisvg
- Yuan325
- duwenxin99
- averikitsch
@@ -20,5 +20,3 @@ sourceFileExtensions:
- 'go'
- 'yaml'
- 'yml'
ignoreFiles:
- 'docs/en/getting-started/quickstart/**'
.github/labels.yaml (32 lines changed)
@@ -50,7 +50,7 @@
color: ffffc7
description: Desirable enhancement or fix. May not be included in next release.

- name: 'do not merge'
- name: do not merge
color: d93f0b
description: Indicates a pull request not ready for merge, due to either quality
or timing.

@@ -65,38 +65,28 @@
color: ededed
description: Release please has completed a release for this.

- name: 'blunderbuss: assign'
color: 3DED97
description: Have blunderbuss assign this to someone new.

- name: 'tests: run'
color: 3DED97
description: Label to trigger Github Action tests.

- name: 'docs: deploy-preview'
color: BFDADC
description: Label to trigger Github Action docs preview.

- name: 'status: help wanted'
- name: 'status: contribution welcome'
color: 8befd7
description: 'Status: Unplanned work open to contributions from the community.'
- name: 'status: feedback wanted'
color: 8befd7
description: 'Status: waiting for feedback from community or issue author.'
description: Status - Contributions welcome.

- name: 'status: waiting for response'
- name: 'status: awaiting response'
color: 8befd7
description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'
description: Status - Awaiting response from author.

- name: 'release candidate'
color: 32CD32
description: 'Use label to signal PR should be included in the next release.'
- name: 'status: awaiting codeowners'
color: 8befd7
description: Status - Awaiting response from code owners.

# Product Labels
- name: 'product: bigquery'
color: 5065c7
description: 'Product: Assigned to the BigQuery team.'
# Product Labels
- name: 'product: looker'
color: 5065c7
description: 'Product: Assigned to the Looker team.'
description: Product - Assigned to the BigQuery team.
.github/release-please.yml (21 lines changed)
@@ -18,23 +18,18 @@ releaseType: simple
versionFile: "cmd/version.txt"
extraFiles: [
"README.md",
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/getting-started/introduction/_index.md",
"docs/en/getting-started/local_quickstart.md",
"docs/en/getting-started/mcp_quickstart/_index.md",
"docs/en/getting-started/quickstart/shared/configure_toolbox.md",
"docs/en/samples/alloydb/_index.md",
"docs/en/samples/alloydb/mcp_quickstart.md",
"docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb",
"docs/en/samples/bigquery/local_quickstart.md",
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
"docs/en/samples/looker/looker_gemini.md",
"docs/en/samples/looker/looker_gemini_oauth/_index.md",
"docs/en/samples/looker/looker_mcp_inspector/_index.md",
"docs/en/how-to/connect-ide/looker_mcp.md",
"docs/en/how-to/connect-ide/mysql_mcp.md",
"docs/en/how-to/connect-ide/mssql_mcp.md",
"docs/en/how-to/connect-ide/bigquery_mcp.md",
"docs/en/how-to/connect-ide/spanner_mcp.md",
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/neo4j_mcp.md",
"docs/en/how-to/connect-ide/sqlite_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
]
@@ -37,7 +37,7 @@ jobs:
runs-on: 'ubuntu-latest'

steps:
- uses: 'actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b' # v7
- uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' # v7
with:
script: |-
// parse test names
.github/workflows/docs_deploy.yaml (4 lines changed)
@@ -39,7 +39,7 @@ jobs:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

@@ -55,7 +55,7 @@ jobs:
node-version: "22"

- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
.github/workflows/docs_preview_clean.yaml (4 lines changed)
@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: gh-pages

@@ -48,7 +48,7 @@ jobs:
git push

- name: Comment
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
script: |
github.rest.issues.createComment({
.github/workflows/docs_preview_deploy.yaml (7 lines changed)
@@ -49,9 +49,8 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

@@ -67,7 +66,7 @@ jobs:
node-version: "22"

- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

@@ -90,7 +89,7 @@ jobs:
commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"

- name: Comment
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
script: |
github.rest.issues.createComment({
.github/workflows/lint.yaml (4 lines changed)
@@ -36,7 +36,7 @@ jobs:
steps:
- name: Remove PR Label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

@@ -55,7 +55,7 @@ jobs:
with:
go-version: "1.22"
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
.github/workflows/schedule_reporter.yml (2 lines changed)
@@ -26,4 +26,4 @@ jobs:
contents: 'read'
uses: ./.github/workflows/cloud_build_failure_reporter.yml
with:
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge,toolbox-continuous-release"
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge"
.github/workflows/sync-labels.yaml (2 lines changed)
@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/tests.yaml (14 lines changed)
@@ -16,7 +16,7 @@ name: tests
on:
push:
branches:
- "main"
- 'main'
pull_request:
pull_request_target:
types: [labeled]

@@ -35,13 +35,13 @@ jobs:
os: [macos-latest, windows-latest, ubuntu-latest]
fail-fast: false
permissions:
contents: "read"
issues: "write"
pull-requests: "write"
contents: 'read'
issues: 'write'
pull-requests: 'write'
steps:
- name: Remove PR label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

@@ -62,7 +62,7 @@ jobs:
go-version: "1.22"

- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

@@ -76,8 +76,6 @@ jobs:

- name: Run tests with coverage
if: ${{ runner.os == 'Linux' }}
env:
GOTOOLCHAIN: go1.25.0+auto
run: |
source_dir="./internal/sources/*"
tool_dir="./internal/tools/*"
.gitignore (7 lines changed)
@@ -4,9 +4,6 @@
# vscode
.vscode/

# idea
.idea/

# npm
node_modules

@@ -17,7 +14,3 @@ node_modules

# coverage
.coverage

# executable
genai-toolbox
toolbox
@@ -8,8 +8,6 @@ defaultContentLanguageInSubdir = false
enableGitInfo = true
enableRobotsTXT = true

ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quickstart/go"]

[languages]
[languages.en]
languageName ="English"
@@ -1,2 +0,0 @@
{{ $file := .Get 0 }}
{{ (printf "%s%s" .Page.File.Dir $file) | readFile | replaceRE "^---[\\s\\S]+?---" "" | safeHTML }}
@@ -1,47 +0,0 @@
{{ $notebookFile := .Get 0 }}
{{ with .Page.Resources.Get $notebookFile }}

{{ $content := .Content | transform.Unmarshal }}
{{ range $content.cells }}

{{ if eq .cell_type "markdown" }}
<div class="notebook-markdown">
{{ $markdown := "" }}
{{ range .source }}{{ $markdown = print $markdown . }}{{ end }}
{{ $markdown | markdownify }}
</div>
{{ end }}

{{ if eq .cell_type "code" }}
<div class="notebook-code">
{{ $code := "" }}
{{ range .source }}{{ $code = print $code . }}{{ end }}
{{ highlight $code "python" "" }}

{{ range .outputs }}
<div class="notebook-output">
{{ with .text }}
<pre class="notebook-stream"><code>{{- range . }}{{ . }}{{ end -}}</code></pre>
{{ end }}

{{ with .data }}
{{ with index . "image/png" }}
<img src="data:image/png;base64,{{ . }}" alt="Notebook output image">
{{ end }}
{{ with index . "image/jpeg" }}
<img src="data:image/jpeg;base64,{{ . }}" alt="Notebook output image">
{{ end }}
{{ with index . "text/html" }}
{{ $html := "" }}
{{ range . }}{{ $html = print $html . }}{{ end }}
{{ $html | safeHTML }}
{{ end }}
{{ end }}
</div>
{{ end }}
</div>
{{ end }}
{{ end }}
{{ else }}
<p style="color: red;">Error: Notebook '{{ $notebookFile }}' not found in page resources.</p>
{{ end }}
@@ -1,49 +0,0 @@
{{/*
snippet.html
Usage:
{{< regionInclude "filename.md" "region_name" >}}
{{< regionInclude "filename.python" "region_name" "python" >}}
*/}}

{{ $file := .Get 0 }}
{{ $region := .Get 1 }}
{{ $lang := .Get 2 | default "text" }}
{{ $path := printf "%s%s" .Page.File.Dir $file }}

{{ if or (not $file) (eq $file "") }}
{{ errorf "The file parameter (first argument) is required and must be non-empty in %s" .Page.File.Path }}
{{ end }}
{{ if or (not $region) (eq $region "") }}
{{ errorf "The region parameter (second argument) is required and must be non-empty in %s" .Page.File.Path }}
{{ end }}
{{ if not (fileExists $path) }}
{{ errorf "File %q not found (referenced in %s)" $path .Page.File.Path }}
{{ end }}

{{ $content := readFile $path }}
{{ $start_tag := printf "[START %s]" $region }}
{{ $end_tag := printf "[END %s]" $region }}

{{ $snippet := "" }}
{{ $in_snippet := false }}
{{ range split $content "\n" }}
{{ if $in_snippet }}
{{ if in . $end_tag }}
{{ $in_snippet = false }}
{{ else }}
{{ $snippet = printf "%s%s\n" $snippet . }}
{{ end }}
{{ else if in . $start_tag }}
{{ $in_snippet = true }}
{{ end }}
{{ end }}

{{ if eq (trim $snippet "") "" }}
{{ errorf "Region %q not found or empty in file %s (referenced in %s)" $region $file .Page.File.Path }}
{{ end }}

{{ if eq $lang "text" }}
{{ $snippet | markdownify }}
{{ else }}
{{ highlight (trim $snippet "\n") $lang "" }}
{{ end }}
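For context, the shortcode above extracts whatever sits between `[START <region>]` and `[END <region>]` markers in the referenced file (the markers only need to appear somewhere on the line, so they can live in comments). A small hypothetical snippet file it could consume:

```bash
# Hypothetical snippet source, referenced as {{< regionInclude "install.sh" "download_toolbox" "bash" >}}
# [START download_toolbox]
curl "https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox" --fail --output toolbox
chmod +x toolbox
# [END download_toolbox]
```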
CHANGELOG.md (159 lines changed)
@@ -1,164 +1,5 @@
|
||||
# Changelog
|
||||
|
||||
## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **bigquery:** Move `useClientOAuth` config from tool to source ([#1279](https://github.com/googleapis/genai-toolbox/issues/1279)) ([8d20a48](https://github.com/googleapis/genai-toolbox/commit/8d20a48f13bcda853d41bdf80a162de12b076d1b))
|
||||
* **tools/bigquerysql:** remove `useClientOAuth` from tools config ([#1312](https://github.com/googleapis/genai-toolbox/issues/1312))
|
||||
|
||||
### Features
|
||||
|
||||
* **clickhouse:** Add ClickHouse Source and Tools ([#1088](https://github.com/googleapis/genai-toolbox/issues/1088)) ([75a04a5](https://github.com/googleapis/genai-toolbox/commit/75a04a55dd2259bed72fe95119a7a51a906c0b21))
|
||||
* **prebuilt/alloydb-postgres:** Support ipType and IAM users ([#1324](https://github.com/googleapis/genai-toolbox/issues/1324)) ([0b2121e](https://github.com/googleapis/genai-toolbox/commit/0b2121ea72eb81348dcd9c740a62ccd32e71fe37))
|
||||
* **server/mcp:** Support toolbox auth in mcp ([#1140](https://github.com/googleapis/genai-toolbox/issues/1140)) ([ca353e0](https://github.com/googleapis/genai-toolbox/commit/ca353e0b66fedc00e9110df57db18632aef49018))
|
||||
* **source/mysql:** Support `queryParams` in MySQL source ([#1299](https://github.com/googleapis/genai-toolbox/issues/1299)) ([3ae2526](https://github.com/googleapis/genai-toolbox/commit/3ae2526e0fe36b57b05a9b54f1d99f3fc68d9657))
|
||||
* **tools/bigquery:** Support end-user credential passthrough on multiple BQ tools ([#1314](https://github.com/googleapis/genai-toolbox/issues/1314)) ([88f4b30](https://github.com/googleapis/genai-toolbox/commit/88f4b3028df3b6a400936cdf8a035bf55021924c))
|
||||
* **tools/looker:** Add description for looker-get-models tool ([#1266](https://github.com/googleapis/genai-toolbox/issues/1266)) ([89af3a4](https://github.com/googleapis/genai-toolbox/commit/89af3a4ca332f029615b2a739d1f6cd50519638d))
|
||||
* **tools/looker:** Authenticate via end user credentials ([#1257](https://github.com/googleapis/genai-toolbox/issues/1257)) ([8755e3d](https://github.com/googleapis/genai-toolbox/commit/8755e3db3476abb35629b3cca9c78db7366757a4))
|
||||
* **tools/looker:** Report field suggestions to agent ([#1267](https://github.com/googleapis/genai-toolbox/issues/1267)) ([2cad82e](https://github.com/googleapis/genai-toolbox/commit/2cad82e5107566dd6c9b75e34e9976af63af0bb5))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Do not print usage on runtime error ([#1315](https://github.com/googleapis/genai-toolbox/issues/1315)) ([afba7a5](https://github.com/googleapis/genai-toolbox/commit/afba7a57cdd4fe7c1b0741dbf8f8c78b14a68089))
|
||||
* Update env var to allow empty string ([#1260](https://github.com/googleapis/genai-toolbox/issues/1260)) ([03aa9fa](https://github.com/googleapis/genai-toolbox/commit/03aa9fabacda06f860c9f178485126bddb7d5782))
|
||||
* **tools/firestore:** Add document/collection path validation ([#1229](https://github.com/googleapis/genai-toolbox/issues/1229)) ([14c2249](https://github.com/googleapis/genai-toolbox/commit/14c224939a2f9bb349fa00a7d5227877198530c2))
|
||||
* **tools/looker-get-dashboards:** Fix Looker client OAuth check ([#1338](https://github.com/googleapis/genai-toolbox/issues/1338)) ([36225aa](https://github.com/googleapis/genai-toolbox/commit/36225aa6db7f8426ad87930866530fde4e9bf0cd))
|
||||
* **tools/oceanbase:** Fix encoded text with mysql driver ([#1283](https://github.com/googleapis/genai-toolbox/issues/1283)) ([d16f89f](https://github.com/googleapis/genai-toolbox/commit/d16f89fbb6e49c03998f114ef7dc2b584b5e4967)), closes [#1161](https://github.com/googleapis/genai-toolbox/issues/1161)
|
||||
|
||||
## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **prebuilt/alloydb:** Add bearer token support for alloydb-wait-for-operation ([#1183](https://github.com/googleapis/genai-toolbox/issues/1183))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add capability to set default for environment variable in config ([#1248](https://github.com/googleapis/genai-toolbox/issues/1248)) ([5bcd52e](https://github.com/googleapis/genai-toolbox/commit/5bcd52e7dcd0773ded723585f4abe29d044e1540))
|
||||
* **firebird:** Add Firebird SQL 2.5+ source and tool ([#1011](https://github.com/googleapis/genai-toolbox/issues/1011)) ([4f6b806](https://github.com/googleapis/genai-toolbox/commit/4f6b806de947efc4e12bdb50dff7781aedb7b966))
|
||||
* **oceanbase:** Add Oceanbase source and tool ([#895](https://github.com/googleapis/genai-toolbox/issues/895)) ([6fc4982](https://github.com/googleapis/genai-toolbox/commit/6fc49826d43f46c84028e752ebebddf3d94b3d13))
|
||||
* **server/mcp:** Support `ping` mechanism ([#1178](https://github.com/googleapis/genai-toolbox/issues/1178)) ([5dcc66c](https://github.com/googleapis/genai-toolbox/commit/5dcc66c84fa72c75ec50a9ac5198018212ec2979))
|
||||
* **server:** Fail-fast on environment variable substitution ([#1177](https://github.com/googleapis/genai-toolbox/issues/1177)) ([212aaba](https://github.com/googleapis/genai-toolbox/commit/212aaba74c8b431de8a5f7b9822a0af4afcaaa0e))
|
||||
* **server:** Implement Tool call auth error propagation ([#1235](https://github.com/googleapis/genai-toolbox/issues/1235)) ([b94a021](https://github.com/googleapis/genai-toolbox/commit/b94a021ca11c6637cf8038449483b5e75f2012b3))
|
||||
* **sources/bigquery:** Add support for user-credential passthrough ([#1067](https://github.com/googleapis/genai-toolbox/issues/1067)) ([650e2e2](https://github.com/googleapis/genai-toolbox/commit/650e2e26f51bff75ce66343f64944d0a89a58b69))
|
||||
* **tool/looker:** Add support for `description` field in looker tool ([#1199](https://github.com/googleapis/genai-toolbox/issues/1199)) ([97f0dd2](https://github.com/googleapis/genai-toolbox/commit/97f0dd2acf26caf28ecad65abea8779c196a27f1))
|
||||
* **tools/bigquery-ask-data-insights:** Add bigquery `ask-data-insights` tool ([#932](https://github.com/googleapis/genai-toolbox/issues/932)) ([7651357](https://github.com/googleapis/genai-toolbox/commit/7651357d424a2b6656d8b6818cebc5c8a86ed053))
|
||||
* **tools/bigquery-forecast:** Add bigqueryforecast tool ([#1148](https://github.com/googleapis/genai-toolbox/issues/1148)) ([2ad0ccf](https://github.com/googleapis/genai-toolbox/commit/2ad0ccf83df542340087742468d6762f81eedee6))
|
||||
* **tools/firestore-add-documents:** Add firestore-add-documents tool ([#1107](https://github.com/googleapis/genai-toolbox/issues/1107)) ([ee4a70a](https://github.com/googleapis/genai-toolbox/commit/ee4a70a0e82b346b07b5b4c60dfa060da2273f50))
|
||||
* **tools/firestore-update-document:** Add firestore-update-document tool ([#1191](https://github.com/googleapis/genai-toolbox/issues/1191)) ([0010123](https://github.com/googleapis/genai-toolbox/commit/00101232a39c70288aac5715649c184858d351e3))
|
||||
* **tools/looker:** Control over whether hidden objects are surfaced ([#1222](https://github.com/googleapis/genai-toolbox/issues/1222)) ([bc91559](https://github.com/googleapis/genai-toolbox/commit/bc91559cc4e5b20385b84cc562b624fabf7e47a8))
|
||||
* **trino:** Add Trino source and tools ([#948](https://github.com/googleapis/genai-toolbox/issues/948)) ([7dd123b](https://github.com/googleapis/genai-toolbox/commit/7dd123b3d76b8eb2b74b5d960959c1f90684b37e))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **tools/looker:** Lookergetdashboards uses proper Authorized helper func ([#1255](https://github.com/googleapis/genai-toolbox/issues/1255)) ([00866bc](https://github.com/googleapis/genai-toolbox/commit/00866bc7fc33115c547213e60316ae889735fdbb))
|
||||
* **tools/mongodb-find-one:** ProjectPayload unmarshaling ([#1167](https://github.com/googleapis/genai-toolbox/issues/1167)) ([8ea6a98](https://github.com/googleapis/genai-toolbox/commit/8ea6a98bd9096ba97722e5f807366887e864004f))
|
||||
* **tools/mysql:** Fix encoded text for mysql ([#1161](https://github.com/googleapis/genai-toolbox/issues/1161)) ([a37cfa8](https://github.com/googleapis/genai-toolbox/commit/a37cfa841d151b9995d4fab73cfc5e4d30d2cc57)), closes [#840](https://github.com/googleapis/genai-toolbox/issues/840)
|
||||
|
||||
## [0.12.0](https://github.com/googleapis/genai-toolbox/compare/v0.11.0...v0.12.0) (2025-08-14)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **prebuiltconfig:** Introduce additional parameter to limit context in list_tables ([#1151](https://github.com/googleapis/genai-toolbox/issues/1151)) ([497d3b1](https://github.com/googleapis/genai-toolbox/commit/497d3b126da252a4b59806ca2ca3c56e78efc13d))
|
||||
* **prebuiltconfig/alloydb-admin:** Add list cluster, instance and users ([#1126](https://github.com/googleapis/genai-toolbox/issues/1126)) ([b42c139](https://github.com/googleapis/genai-toolbox/commit/b42c139158650fb1f3b696965e840c52e2016bf0))
|
||||
* **prebuiltconfig/alloydb-admin:** Add tool to create user via Built in user type or IAM ([#1130](https://github.com/googleapis/genai-toolbox/issues/1130)) ([f5bcb9c](https://github.com/googleapis/genai-toolbox/commit/f5bcb9c755a2c1747d0beeda568b6217d7420e7a))
|
||||
* **source/http:** Add User Agent to `http` invocations ([#1102](https://github.com/googleapis/genai-toolbox/issues/1102)) ([6f55b78](https://github.com/googleapis/genai-toolbox/commit/6f55b78e96b8c7aa9aca601cfae4d62f3e1eb42b))
|
||||
* **sources/postgres:** Add support for `queryParams` ([#1047](https://github.com/googleapis/genai-toolbox/issues/1047)) ([7b57251](https://github.com/googleapis/genai-toolbox/commit/7b5725140279de21fece45e860945b7a7d23e7d0)), closes [#963](https://github.com/googleapis/genai-toolbox/issues/963)
|
||||
* **tools/bigquery-execute-sql:** Add dry run support ([#1057](https://github.com/googleapis/genai-toolbox/issues/1057)) ([1cac9b5](https://github.com/googleapis/genai-toolbox/commit/1cac9b5b378153c7dc65ff3dfb4ebd852b715a10))
|
||||
* **tools/dataplex-search-aspect-types:** Add support for `dataplex-search-aspect-types` tool ([#1061](https://github.com/googleapis/genai-toolbox/issues/1061)) ([d940187](https://github.com/googleapis/genai-toolbox/commit/d940187c851666cc201f519665fb4f2e1478465c))
|
||||
* **tools/looker:** Add `looker-make-look` tool to create Looks ([#1099](https://github.com/googleapis/genai-toolbox/issues/1099)) ([61d9489](https://github.com/googleapis/genai-toolbox/commit/61d94893448f633a5f2b9d7f0744ab40704af824))
|
||||
* **tools/looker:** Add visualizations to `query-url` tool ([#1090](https://github.com/googleapis/genai-toolbox/issues/1090)) ([5bf2758](https://github.com/googleapis/genai-toolbox/commit/5bf275846a268a8d305d6392fa4e8e79e365f00d))
|
||||
* **tools/looker:** New Looker tools for dashboards ([#1118](https://github.com/googleapis/genai-toolbox/issues/1118)) ([42be3f5](https://github.com/googleapis/genai-toolbox/commit/42be3f550ceab34baf43fe2a246ded7a09cff8e3))
|
||||
* **ui:** Add login with google button for automatic id token retrieval ([#1044](https://github.com/googleapis/genai-toolbox/issues/1044)) ([d91bdfc](https://github.com/googleapis/genai-toolbox/commit/d91bdfcbdcbf5fcae6e17770c88c5ffba4115d67))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Correct the capitalization of `map` manifests ([#1139](https://github.com/googleapis/genai-toolbox/issues/1139)) ([0b0457c](https://github.com/googleapis/genai-toolbox/commit/0b0457c8e6b78f53a2f1929c05d46fb31421fbca))
|
||||
* Remove unnecessary fields from `map` parameter manifests ([#1138](https://github.com/googleapis/genai-toolbox/issues/1138)) ([fbe8c1a](https://github.com/googleapis/genai-toolbox/commit/fbe8c1a9c0f28797443bf9cb32d63bfbc1072881))
|
||||
* **tools/looker:** Add authorized invocation feature to all Looker tools ([#1091](https://github.com/googleapis/genai-toolbox/issues/1091)) ([3b1cce7](https://github.com/googleapis/genai-toolbox/commit/3b1cce72e7ff4f6b3a0a31db0564dc45b8302caa))
|
||||
* Update ui info log to reflect port ([#1125](https://github.com/googleapis/genai-toolbox/issues/1125)) ([6d691d5](https://github.com/googleapis/genai-toolbox/commit/6d691d582f18137de504d39f372c5104b7392bff))
|
||||
|
||||
## [0.11.0](https://github.com/googleapis/genai-toolbox/compare/v0.11.0...v0.11.0) (2025-08-05)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **tools/bigquery-sql:** Ensure invoke always returns a non-null value ([#1020](https://github.com/googleapis/genai-toolbox/issues/1020)) ([9af55b6](https://github.com/googleapis/genai-toolbox/commit/9af55b651d836f268eda342ea27380e7c9967c94))
|
||||
* **tools/bigquery-execute-sql:** Update the return messages ([#1034](https://github.com/googleapis/genai-toolbox/issues/1034)) ([051e686](https://github.com/googleapis/genai-toolbox/commit/051e686476a781ca49f7617764d507916a1188b8))
|
||||
|
||||
### Features
|
||||
|
||||
* Add TiDB source and tool ([#829](https://github.com/googleapis/genai-toolbox/issues/829)) ([6eaf36a](https://github.com/googleapis/genai-toolbox/commit/6eaf36ac8505d523fa4f5a4ac3c97209fd688cef))
|
||||
* Interactive web UI for Toolbox ([#1065](https://github.com/googleapis/genai-toolbox/issues/1065)) ([8749b03](https://github.com/googleapis/genai-toolbox/commit/8749b030035e65361047c4ead13dfacb8e9a9b59))
|
||||
* **prebuiltconfigs/cloud-sql-postgres:** Introduce additional parameter to limit context in list tables ([#1062](https://github.com/googleapis/genai-toolbox/issues/1062)) ([c3a58e1](https://github.com/googleapis/genai-toolbox/commit/c3a58e1d1678dc14d8de5006511df597fd75faa3))
|
||||
* **tools/looker-query-url:** Add support for `looker-query-url` tool ([#1015](https://github.com/googleapis/genai-toolbox/issues/1015)) ([327ddf0](https://github.com/googleapis/genai-toolbox/commit/327ddf0439058aa5ecd2c7ae8251fcde6aeff18c))
|
||||
* **tools/dataplex-lookup-entry:** Add support for `dataplex-lookup-entry` tool ([#1009](https://github.com/googleapis/genai-toolbox/issues/1009)) ([5fa1660](https://github.com/googleapis/genai-toolbox/commit/5fa1660fc8631989b4d13abea205b6426bb506a5))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **tools/bigquery,mssql,mysql,postgres,spanner,tidb:** Add query logging to execute-sql tools ([#1069](https://github.com/googleapis/genai-toolbox/issues/1069)) ([0527532](https://github.com/googleapis/genai-toolbox/commit/0527532bd7085ef9eb8f9c30f430a2f2f35cef32))
|
||||
|
||||
## [0.10.0](https://github.com/googleapis/genai-toolbox/compare/v0.9.0...v0.10.0) (2025-07-25)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add `Map` parameters support ([#928](https://github.com/googleapis/genai-toolbox/issues/928)) ([4468bc9](https://github.com/googleapis/genai-toolbox/commit/4468bc920bbf27dce4ab160197587b7c12fcd20f))
|
||||
* Add Dataplex source and tool ([#847](https://github.com/googleapis/genai-toolbox/issues/847)) ([30c16a5](https://github.com/googleapis/genai-toolbox/commit/30c16a559e8d49a9a717935269e69b97ec25519a))
|
||||
* Add Looker source and tool ([#923](https://github.com/googleapis/genai-toolbox/issues/923)) ([c67e01b](https://github.com/googleapis/genai-toolbox/commit/c67e01bcf998e7b884be30ebb1fd277c89ed6ffc))
|
||||
* Add support for null optional parameter ([#802](https://github.com/googleapis/genai-toolbox/issues/802)) ([a817b12](https://github.com/googleapis/genai-toolbox/commit/a817b120ca5e09ce80eb8d7544ebbe81fc28b082)), closes [#736](https://github.com/googleapis/genai-toolbox/issues/736)
|
||||
* **prebuilt/alloydb-admin-config:** Add alloydb control plane as a prebuilt config ([#937](https://github.com/googleapis/genai-toolbox/issues/937)) ([0b28b72](https://github.com/googleapis/genai-toolbox/commit/0b28b72aa0ca2cdc87afbddbeb7f4dbb9688593d))
|
||||
* **prebuilt/mysql,prebuilt/mssql:** Add generic mysql and mssql prebuilt tools ([#983](https://github.com/googleapis/genai-toolbox/issues/983)) ([c600c30](https://github.com/googleapis/genai-toolbox/commit/c600c30374443b6106c1f10b60cd334fd202789b))
|
||||
* **server/mcp:** Support MCP version 2025-06-18 ([#898](https://github.com/googleapis/genai-toolbox/issues/898)) ([313d3ca](https://github.com/googleapis/genai-toolbox/commit/313d3ca0d084a3a6e7ac9a21a862aa31bf3edadd))
|
||||
* **sources/mssql:** Add support for encrypt connection parameter ([#874](https://github.com/googleapis/genai-toolbox/issues/874)) ([14a868f](https://github.com/googleapis/genai-toolbox/commit/14a868f2a0780b94c2ca104419b2ff098778303b))
|
||||
* **sources/firestore:** Add Firestore as Source ([#786](https://github.com/googleapis/genai-toolbox/issues/786)) ([2bb790e](https://github.com/googleapis/genai-toolbox/commit/2bb790e4f8194b677fe0ba40122d409d0e3e687e))
|
||||
* **sources/mongodb:** Add MongoDB Source ([#969](https://github.com/googleapis/genai-toolbox/issues/969)) ([74dbd61](https://github.com/googleapis/genai-toolbox/commit/74dbd6124daab6192dd880dbd1d15f36861abf74))
|
||||
* **tools/alloydb-wait-for-operation:** Add wait for operation tool with exponential backoff ([#920](https://github.com/googleapis/genai-toolbox/issues/920)) ([3f6ec29](https://github.com/googleapis/genai-toolbox/commit/3f6ec2944ede18ee02b10157cc048145bdaec87a))
|
||||
* **tools/mongodb-aggregate:** Add MongoDB `aggregate` Tools ([#977](https://github.com/googleapis/genai-toolbox/issues/977)) ([bd399bb](https://github.com/googleapis/genai-toolbox/commit/bd399bb0fb7134469345ed9a1111ea4209440867))
|
||||
* **tools/mongodb-delete:** Add MongoDB `delete` Tools ([#974](https://github.com/googleapis/genai-toolbox/issues/974)) ([78e9752](https://github.com/googleapis/genai-toolbox/commit/78e9752f620e065246f3e7b9d37062e492247c8a))
* **tools/mongodb-find:** Add MongoDB `find` Tools ([#970](https://github.com/googleapis/genai-toolbox/issues/970)) ([a747475](https://github.com/googleapis/genai-toolbox/commit/a7474752d8d7ea7af1e80a3c4533d2fd4154d897))
* **tools/mongodb-insert:** Add MongoDB `insert` Tools ([#975](https://github.com/googleapis/genai-toolbox/issues/975)) ([4c63f0c](https://github.com/googleapis/genai-toolbox/commit/4c63f0c1e402817a0c8fec611635e99290308d0e))
* **tools/mongodb-update:** Add MongoDB `update` Tools ([#972](https://github.com/googleapis/genai-toolbox/issues/972)) ([dfde52c](https://github.com/googleapis/genai-toolbox/commit/dfde52ca9a8e25e2f3944f52b4c2e307072b6c37))
* **tools/neo4j-execute-cypher:** Add neo4j-execute-cypher for Neo4j sources ([#946](https://github.com/googleapis/genai-toolbox/issues/946)) ([81d0505](https://github.com/googleapis/genai-toolbox/commit/81d05053b2e08338fd6eabe4849c309064f76b6b))
* **tools/neo4j-schema:** Add neo4j-schema tool ([#978](https://github.com/googleapis/genai-toolbox/issues/978)) ([be7db3d](https://github.com/googleapis/genai-toolbox/commit/be7db3dff263625ce64fdb726e81164996b7a708))
* **tools/wait:** Create wait for tool ([#885](https://github.com/googleapis/genai-toolbox/issues/885)) ([ed5ef4c](https://github.com/googleapis/genai-toolbox/commit/ed5ef4caea10ba1dbc49c0fc0a0d2b91cf341d3b))
### Bug Fixes
* Fix document preview pipeline for forked PRs ([#950](https://github.com/googleapis/genai-toolbox/issues/950)) ([481cc60](https://github.com/googleapis/genai-toolbox/commit/481cc608bae807d9e92497bc8863066916f7ef21))
* **prebuilt/firestore:** Mark database field as required in the firestore prebuilt tools ([#959](https://github.com/googleapis/genai-toolbox/issues/959)) ([15417d4](https://github.com/googleapis/genai-toolbox/commit/15417d4e0c7b173e81edbbeb672e53884d186104))
* **prebuilt/cloud-sql-mssql:** Correct source reference for execute_sql tool in cloud-sql-mssql.yaml prebuilt config ([#938](https://github.com/googleapis/genai-toolbox/issues/938)) ([d16728e](https://github.com/googleapis/genai-toolbox/commit/d16728e5c603eab37700876a6ddacbf709fd5823))
* **prebuilt/cloud-sql-mysql:** Update list_table tool ([#924](https://github.com/googleapis/genai-toolbox/issues/924)) ([2083ba5](https://github.com/googleapis/genai-toolbox/commit/2083ba50483951e9ee6101bb832aa68823cd96a5))
* Replace 'float' with 'number' in McpManifest ([#985](https://github.com/googleapis/genai-toolbox/issues/985)) ([59e23e1](https://github.com/googleapis/genai-toolbox/commit/59e23e17250a516e3931996114f32ac6526a4f8e))
* **server/api:** Add logger to context in tool invoke handler ([#891](https://github.com/googleapis/genai-toolbox/issues/891)) ([8ce311f](https://github.com/googleapis/genai-toolbox/commit/8ce311f256481e8f11ecb4aa505b95a562f394ef))
* **sources/looker:** Add agent tag to Looker API calls. ([#966](https://github.com/googleapis/genai-toolbox/issues/966)) ([f55dd6f](https://github.com/googleapis/genai-toolbox/commit/f55dd6fcd099f23bd89df62b268c4a53d16f3bac))
* **tools/bigquery-execute-sql:** Ensure invoke always returns a non-null value ([#925](https://github.com/googleapis/genai-toolbox/issues/925)) ([9a55b80](https://github.com/googleapis/genai-toolbox/commit/9a55b804821a6ccfcd157bcfaee7e599c4a5cb63))
* **tools/mysqlsql:** Unmarshal json data from database during invoke ([#979](https://github.com/googleapis/genai-toolbox/issues/979)) ([ccc3498](https://github.com/googleapis/genai-toolbox/commit/ccc3498cf0a4c43eb909e3850b9e6f582cd48f2a)), closes [#840](https://github.com/googleapis/genai-toolbox/issues/840)
## [0.9.0](https://github.com/googleapis/genai-toolbox/compare/v0.8.0...v0.9.0) (2025-07-11)
### Features
* Dynamic reloading for toolbox config ([#800](https://github.com/googleapis/genai-toolbox/issues/800)) ([4c240ac](https://github.com/googleapis/genai-toolbox/commit/4c240ac3c961cd14738c998ba2d10d5235ef523e))
* **sources/mysql:** Add queryTimeout support to MySQL source ([#830](https://github.com/googleapis/genai-toolbox/issues/830)) ([391cb5b](https://github.com/googleapis/genai-toolbox/commit/391cb5bfe845e554411240a1d9838df5331b25fa))
* **tools/bigquery:** Add optional projectID parameter to bigquery tools ([#799](https://github.com/googleapis/genai-toolbox/issues/799)) ([c6ab74c](https://github.com/googleapis/genai-toolbox/commit/c6ab74c5dad53a0e7885a18438ab3be36b9b7cb3))
### Bug Fixes
* Cleanup unassigned err log ([#857](https://github.com/googleapis/genai-toolbox/issues/857)) ([c081ace](https://github.com/googleapis/genai-toolbox/commit/c081ace46bb24cb3fd2adb21d519489be0d3f3c3))
* Fix docs preview deployment pipeline ([#787](https://github.com/googleapis/genai-toolbox/issues/787)) ([0a93b04](https://github.com/googleapis/genai-toolbox/commit/0a93b0482c8d3c64b324e67408d408f5576ecaf3))
* **tools:** Nil parameter error when arrays are used ([#801](https://github.com/googleapis/genai-toolbox/issues/801)) ([2bdcc08](https://github.com/googleapis/genai-toolbox/commit/2bdcc0841ab37d18e2f0d6fe63fb6f10da3e302b))
* Trigger reload on additional fsnotify operations ([#854](https://github.com/googleapis/genai-toolbox/issues/854)) ([aa8dbec](https://github.com/googleapis/genai-toolbox/commit/aa8dbec97095cf0d7ac771c8084a84e2d3d8ce4e))
## [0.8.0](https://github.com/googleapis/genai-toolbox/compare/v0.7.0...v0.8.0) (2025-07-02)
101
CONTRIBUTING.md
@@ -25,42 +25,33 @@ This project follows
|
||||
|
||||
## Contribution process
|
||||
|
||||
> [!NOTE]
|
||||
> New contributions should always include both unit and integration tests.
|
||||
|
||||
### Code reviews
|
||||
|
||||
All submissions, including submissions by project members, require review. We
|
||||
use GitHub pull requests for this purpose. Consult
|
||||
[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
|
||||
information on using pull requests.
|
||||
|
||||
### Code reviews
|
||||
|
||||
* Within 2-5 days, a reviewer will review your PR. They may approve it, or request
|
||||
changes.
|
||||
* When requesting changes, reviewers should self-assign the PR to ensure
|
||||
Within 2-5 days, a reviewer will review your PR. They may approve it, or request
|
||||
changes. When requesting changes, reviewers should self-assign the PR to ensure
|
||||
they are aware of any updates.
|
||||
* If additional changes are needed, push additional commits to your PR branch -
|
||||
this helps the reviewer know which parts of the PR have changed.
|
||||
* Commits will be
|
||||
If additional changes are needed, push additional commits to your PR branch -
|
||||
this helps the reviewer know which parts of the PR have changed. Commits will be
|
||||
squashed when merged.
|
||||
* Please follow up with changes promptly.
|
||||
* If a PR is awaiting changes by the
|
||||
Please follow up with changes promptly. If a PR is awaiting changes by the
|
||||
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
|
||||
are inactive for more than 30 days may be closed.
|
||||
|
||||
## Adding a New Database Source or Tool
|
||||
### Adding a New Database Source and Tool
|
||||
|
||||
Please create an
|
||||
We recommend creating an
|
||||
[issue](https://github.com/googleapis/genai-toolbox/issues) before
|
||||
implementation to ensure we can accept the contribution and avoid duplicated work. This issue
|
||||
should include an overview of the API design. If you have any questions, reach out on our
|
||||
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team.
|
||||
implementation to ensure we can accept the contribution and avoid duplicated work.
|
||||
If you have any questions, reach out on our
|
||||
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team. New
|
||||
contributions should be added with both unit tests and integration tests.
|
||||
|
||||
> [!NOTE]
|
||||
> New tools can be added for [pre-existing data sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources). However, any new database source should also include at least one new tool type.
|
||||
|
||||
### Adding a New Database Source
|
||||
#### 1. Implement the New Data Source
|
||||
|
||||
We recommend looking at an [example source
|
||||
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).
|
||||
@@ -87,13 +78,10 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
|
||||
* **Implement `init()`** to register the new Source.
|
||||
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
||||
|
||||
### Adding a New Tool
|
||||
|
||||
> [!NOTE]
|
||||
> Please follow the tool naming convention detailed [here](./DEVELOPER.md#tool-naming-conventions).
|
||||
#### 2. Implement the New Tool
|
||||
|
||||
We recommend looking at an [example tool
|
||||
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
|
||||
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgressql).
|
||||
|
||||
* **Create a new directory** under `internal/tools` for your tool type (e.g.,
|
||||
`internal/tools/newdb` or `internal/tools/newdb<tool_name>`).
|
||||
@@ -123,14 +111,12 @@ tools.
|
||||
* **Implement `init()`** to register the new Tool.
|
||||
* **Implement Unit Tests** in a file named `newdb_test.go`.
|
||||
|
||||
### Adding Integration Tests
|
||||
#### 3. Add Integration Tests
|
||||
|
||||
* **Add a test file** under a new directory `tests/newdb`.
|
||||
* **Add pre-defined integration test suites** in the
|
||||
`/tests/newdb/newdb_test.go` that are **required** to be run as long as your
|
||||
code contains related features. Please check each test suite for the config
defaults; if your source requires test suite config updates, refer to the
|
||||
[config option](./tests/option.go):
|
||||
code contains related features:
|
||||
|
||||
1. [RunToolGetTest][tool-get]: tests for the `GET` endpoint that returns the
|
||||
tool's manifest.
|
||||
@@ -148,8 +134,8 @@ tools.
|
||||
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
|
||||
parameters][temp-param-doc]. Only run this test if template
|
||||
parameters apply to your tool.
|
||||
|
||||
* **Add the new database to the integration test workflow** in
|
||||
|
||||
* **Add the new database to the test config** in
|
||||
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
|
||||
|
||||
[tool-get]:
|
||||
@@ -165,7 +151,7 @@ tools.
|
||||
[temp-param-doc]:
|
||||
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
|
||||
|
||||
### Adding Documentation
|
||||
#### 4. Add Documentation
|
||||
|
||||
* **Update the documentation** to include information about your new data source
|
||||
and tool. This includes:
|
||||
@@ -175,7 +161,7 @@ tools.
|
||||
|
||||
* **(Optional) Add samples** to the `docs/en/samples/<newdb>` directory.
|
||||
|
||||
### (Optional) Adding Prebuilt Tools
|
||||
#### (Optional) 5. Add Prebuilt Tools
|
||||
|
||||
You can provide developers with a set of "build-time" tools to aid common
|
||||
software development user journeys like viewing and creating tables/collections
|
||||
@@ -189,45 +175,8 @@ and data.
|
||||
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
|
||||
and [cmd/root_test.go](cmd/root_test.go).
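As a rough, hypothetical sketch (the `newdb` source and its tool names below are placeholders, not an existing config), the toolset section of such a prebuilt configuration might look like the following, mirroring the structure of a regular `tools.yaml`:

```yaml
# Hypothetical prebuilt config excerpt for a "newdb" source.
# The toolset groups the prebuilt tools so clients can load them together.
toolsets:
  newdb_database_tools:
    - execute_sql
    - list_tables
```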
|
||||
|
||||
## Submitting a Pull Request
|
||||
#### 6. Submit a Pull Request
|
||||
|
||||
Submit a pull request to the repository with your changes. Be sure to include a
|
||||
detailed description of your changes and any requests for long term testing
|
||||
resources.
|
||||
|
||||
* **Title:** All pull request titles should follow the formatting of
|
||||
[Conventional
|
||||
Commit](https://www.conventionalcommits.org/) guidelines: `<type>[optional
|
||||
scope]: description`. For example, if you are adding a new field in postgres
|
||||
source, the title should be `feat(source/postgres): add support for
|
||||
"new-field" field in postgres source`.
|
||||
|
||||
Here are some commonly used `type` values in this GitHub repo.
|
||||
|
||||
| **type**        | **description**                                                                                           |
|-----------------|-----------------------------------------------------------------------------------------------------------|
| Breaking Change | Any type/scope followed by a `!` introduces a breaking change.                                              |
| feat            | Adding a new feature to the codebase.                                                                       |
| fix             | Fixing a bug or typo in the codebase. This does not include fixing docs.                                    |
| test            | Changes made to test files.                                                                                 |
| ci              | Changes made to the CI/CD configuration files or scripts.                                                   |
| docs            | Documentation-related PRs, including fixes on docs.                                                         |
| chore           | Other small tasks or updates that don't fall into any of the above types.                                   |
| refactor        | Changes to source code that, unlike feat, break no tests and lose no line of coverage.                      |
| revert          | Revert changes made in another commit.                                                                      |
| style           | Source code updates that only change formatting and whitespace (e.g. code formatter or linter changes).     |

Pull requests should include a scope whenever possible. The scope is
|
||||
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
|
||||
`tools/mssql-sql`).
|
||||
|
||||
Ideally, **each PR covers only one scope**. If that is not possible,
multiple scopes can be separated with a comma (e.g.
|
||||
`sources/postgres,sources/alloydbpg`). If the PR covers multiple `scope-type`
|
||||
(such as adding a new database), you can disregard the `scope-type`, e.g.
|
||||
`feat(new-db): adding support for new-db source and tool`.
|
||||
|
||||
* **PR Description:** PR description should **always** be included. It should
|
||||
include a concise description of the changes and their impact, along with a
|
||||
summary of the solution. If the PR is related to a specific issue, the issue
|
||||
number should be mentioned in the PR description (e.g. `Fixes #1`).
|
||||
* **Submit a pull request** to the repository with your changes. Be sure to
|
||||
include a detailed description of your changes and any requests for long term
|
||||
testing resources.
|
||||
|
||||
43
DEVELOPER.md
@@ -44,47 +44,6 @@ Before you begin, ensure you have the following:
|
||||
curl http://127.0.0.1:5000
|
||||
```
|
||||
|
||||
### Tool Naming Conventions
|
||||
|
||||
This section details the purpose and conventions for MCP Toolbox's tool naming
|
||||
properties, **tool name** and **tool kind**.
|
||||
|
||||
```
cancel_hotel:           <- tool name
  kind: postgres-sql    <- tool kind
  source: my_pg_source
```
|
||||
|
||||
#### Tool Name
|
||||
|
||||
Tool name is the identifier used by a Large Language Model (LLM) to invoke a
|
||||
specific tool.
|
||||
* Custom tools: The user can define any name they want. The below guidelines
|
||||
do not apply.
|
||||
* Pre-built tools: The tool name is predefined and cannot be changed. It
|
||||
should follow the guidelines.
|
||||
|
||||
The following guidelines apply to tool names:
|
||||
* Should use underscores over hyphens (e.g., `list_collections` instead of
|
||||
`list-collections`).
|
||||
* Should not have the product name in the name (e.g., `list_collections` instead
|
||||
of `firestore_list_collections`).
|
||||
* Superficial changes are NOT considered breaking (e.g., changing a tool name).
* Non-superficial changes MAY be considered breaking (e.g., adding new parameters
to a function) until they can be validated through extensive testing to ensure
they do not negatively impact the agent's performance.
|
||||
|
||||
#### Tool Kind
|
||||
|
||||
Tool kind serves as a category or type that a user can assign to a tool.
|
||||
|
||||
The following guidelines apply to tool kinds (a combined sketch follows this list):
|
||||
* Should use hyphens over underscores (e.g., `firestore-list-collections` over
`firestore_list_collections`).
|
||||
* Should include the product name (e.g., `firestore-list-collections` over
|
||||
`list-collections`).
|
||||
* Changes to tool kind are breaking changes and should be avoided.
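Putting the two conventions together, here is a minimal, non-authoritative sketch of a custom tool entry (the tool, kind, and source names are illustrative):

```
list_collections:                    <- tool name: underscores, no product name
  kind: firestore-list-collections   <- tool kind: hyphens, includes product name
  source: my_firestore_source
```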
|
||||
|
||||
## Testing
|
||||
|
||||
### Infrastructure
|
||||
@@ -149,7 +108,7 @@ variables for each source.
|
||||
* AlloyDB - setup in the test project
|
||||
* AI Natural Language ([setup
|
||||
instructions](https://cloud.google.com/alloydb/docs/ai/use-natural-language-generate-sql-queries))
|
||||
has been configured for `alloydb-ai-nl` tool tests
|
||||
has been configured for `alloydb-ai-nl` tool tests
|
||||
* The Cloud Build service account is a user
|
||||
* Bigtable - setup in the test project
|
||||
* The Cloud Build service account is a user
|
||||
|
||||
363
README.md
@@ -2,9 +2,7 @@
|
||||
|
||||
# MCP Toolbox for Databases
|
||||
|
||||
[](https://googleapis.github.io/genai-toolbox/)
|
||||
[](https://discord.gg/Dmm69peqjh)
|
||||
[](https://medium.com/@mcp_toolbox)
|
||||
[](https://discord.gg/Dmm69peqjh)
|
||||
[](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
|
||||
|
||||
> [!NOTE]
|
||||
@@ -33,17 +31,13 @@ documentation](https://googleapis.github.io/genai-toolbox/).
|
||||
- [Getting Started](#getting-started)
|
||||
- [Installing the server](#installing-the-server)
|
||||
- [Running the server](#running-the-server)
|
||||
- [Homebrew Users](#homebrew-users)
|
||||
- [Integrating your application](#integrating-your-application)
|
||||
- [Configuration](#configuration)
|
||||
- [Sources](#sources)
|
||||
- [Tools](#tools)
|
||||
- [Toolsets](#toolsets)
|
||||
- [Versioning](#versioning)
|
||||
- [Pre-1.0.0 Versioning](#pre-100-versioning)
|
||||
- [Post-1.0.0 Versioning](#post-100-versioning)
|
||||
- [Contributing](#contributing)
|
||||
- [Community](#community)
|
||||
|
||||
<!-- /TOC -->
|
||||
|
||||
@@ -117,7 +111,7 @@ To install Toolbox as a binary:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.14.0
|
||||
export VERSION=0.8.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -130,23 +124,12 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.14.0
|
||||
export VERSION=0.8.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Homebrew</summary>
|
||||
|
||||
To install Toolbox using Homebrew on macOS or Linux:
|
||||
|
||||
```sh
|
||||
brew install mcp-toolbox
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Compile from source</summary>
|
||||
|
||||
@@ -154,7 +137,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.14.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -165,65 +148,11 @@ go install github.com/googleapis/genai-toolbox@v0.14.0
|
||||
[Configure](#configuration) a `tools.yaml` to define your tools, and then
|
||||
execute `toolbox` to start the server:
|
||||
|
||||
<details open>
|
||||
<summary>Binary</summary>
|
||||
|
||||
To run Toolbox from binary:
|
||||
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
ⓘ **NOTE:**
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
|
||||
<summary>Container image</summary>
|
||||
|
||||
To run the server after pulling the [container image](#installing-the-server):
|
||||
|
||||
```sh
|
||||
export VERSION=0.11.0 # Use the version you pulled
|
||||
docker run -p 5000:5000 \
|
||||
-v $(pwd)/tools.yaml:/app/tools.yaml \
|
||||
us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
|
||||
--tools-file "/app/tools.yaml"
|
||||
```
|
||||
|
||||
ⓘ **NOTE:**
|
||||
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps the container's port `5000` to your host's port `5000`.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
|
||||
<summary>Source</summary>
|
||||
|
||||
To run the server directly from source, navigate to the project root directory and run:
|
||||
|
||||
```sh
|
||||
go run .
|
||||
```
|
||||
|
||||
ⓘ **NOTE:**
|
||||
This command runs the project from source, and is more suitable for development and testing. It does **not** compile a binary into your `$GOPATH`. If you want to compile a binary instead, refer to the [Developer Documentation](./DEVELOPER.md#building-the-binary).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
|
||||
<summary>Homebrew</summary>
|
||||
|
||||
If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox` binary is available in your system path. You can start the server with the same command:
|
||||
|
||||
```sh
|
||||
toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
</details>
|
||||
> [!NOTE]
|
||||
> Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
@@ -232,7 +161,6 @@ For more detailed documentation on deploying to different environments, check
|
||||
out the resources in the [How-to
|
||||
section](https://googleapis.github.io/genai-toolbox/how-to/)
|
||||
|
||||
|
||||
### Integrating your application
|
||||
|
||||
Once your server is up and running, you can load the tools into your
|
||||
@@ -442,256 +370,7 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
<details>
|
||||
<summary>Go (<a href="https://github.com/googleapis/mcp-toolbox-sdk-go">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
<details open>
|
||||
<summary>Core</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"context"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000";
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tools
|
||||
tools, err := client.LoadToolset("toolsetName", ctx)
|
||||
}
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README][toolbox-core-go-readme].
|
||||
|
||||
[toolbox-go]: https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core
|
||||
[toolbox-core-go-readme]: https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>LangChain Go</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
|
||||
// Use this tool with LangChainGo
|
||||
langChainTool := llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>Genkit</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
|
||||
"github.com/invopop/jsonschema"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
g, err := genkit.Init(ctx)
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Convert the tool using the tbgenkit package
|
||||
// Use this tool with Genkit Go
|
||||
genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to convert tool: %v\n", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>Go GenAI</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var schema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: schema,
|
||||
}
|
||||
|
||||
// Use this tool with Go GenAI
|
||||
genAITool := &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>OpenAI Go</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var paramsSchema openai.FunctionParameters
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
|
||||
// Use this tool with OpenAI Go
|
||||
openAITool := openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: tool.Name(),
|
||||
Description: openai.String(tool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
|
||||
</details>
|
||||
|
||||
## Configuration
|
||||
@@ -772,26 +451,12 @@ my_second_toolset = client.load_toolset("my_second_toolset")
|
||||
|
||||
## Versioning
|
||||
|
||||
This project uses [semantic versioning](https://semver.org/) (`MAJOR.MINOR.PATCH`).
|
||||
Since the project is in a pre-release stage (version `0.x.y`), we follow the
|
||||
standard conventions for initial development:
|
||||
This project uses [semantic versioning](https://semver.org/), including a
|
||||
`MAJOR.MINOR.PATCH` version number that increments with:
|
||||
|
||||
### Pre-1.0.0 Versioning
|
||||
While the major version is `0`, the public API should be considered unstable.
|
||||
The version will be incremented as follows:
|
||||
|
||||
- **`0.MINOR.PATCH`**: The **MINOR** version is incremented when we add
|
||||
new functionality or make breaking, incompatible API changes.
|
||||
- **`0.MINOR.PATCH`**: The **PATCH** version is incremented for
|
||||
backward-compatible bug fixes.
|
||||
|
||||
### Post-1.0.0 Versioning
|
||||
Once the project reaches a stable `1.0.0` release, the versioning will follow
|
||||
the more common convention:
|
||||
|
||||
- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
|
||||
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
|
||||
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
|
||||
- MAJOR version when we make incompatible API changes
|
||||
- MINOR version when we add functionality in a backward compatible manner
|
||||
- PATCH version when we make backward compatible bug fixes
|
||||
|
||||
The public API that this applies to is the CLI associated with Toolbox, the
|
||||
interactions with official SDKs, and the definitions in the `tools.yaml` file.
|
||||
@@ -804,7 +469,3 @@ to get started.
|
||||
Please note that this project is released with a Contributor Code of Conduct.
|
||||
By participating in this project you agree to abide by its terms. See
|
||||
[Contributor Code of Conduct](CODE_OF_CONDUCT.md) for more information.
|
||||
|
||||
## Community
|
||||
|
||||
Join our [discord community](https://discord.gg/GQrFB3Ec3W) to connect with our developers!
|
||||
|
||||
138
cmd/root.go
@@ -42,130 +42,49 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -240,9 +159,6 @@ func NewCommand(opts ...Option) *Command {
|
||||
o(cmd)
|
||||
}
|
||||
|
||||
// Do not print Usage on runtime error
|
||||
cmd.SilenceUsage = true
|
||||
|
||||
// Set server version
|
||||
cmd.cfg.Version = versionString
|
||||
|
||||
@@ -266,13 +182,7 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
||||
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
|
||||
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||
|
||||
// Fetch prebuilt tools sources to customize the help description
|
||||
prebuiltHelp := fmt.Sprintf(
|
||||
"Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: '%s'.",
|
||||
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
||||
)
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'postgres', 'spanner', 'spanner-postgres'.")
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||
@@ -292,40 +202,32 @@ type ToolsFile struct {
|
||||
}
|
||||
|
||||
// parseEnv replaces environment variables ${ENV_NAME} with their values.
// also support ${ENV_NAME:default_value}.
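// Illustration only (hypothetical variable names), matching the cases covered
// by TestParseEnv: with FOO set to "bar" and BAZ unset,
//
//	parseEnv("${FOO}")     returns "bar", nil
//	parseEnv("${BAZ:qux}") returns "qux", nil   (default value used)
//	parseEnv("${BAZ}")     returns "" and a non-nil error (unset, no default)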
func parseEnv(input string) (string, error) {
|
||||
re := regexp.MustCompile(`\$\{(\w+)(:(\w*))?\}`)
|
||||
func parseEnv(input string) string {
|
||||
re := regexp.MustCompile(`\$\{(\w+)\}`)
|
||||
|
||||
var err error
|
||||
output := re.ReplaceAllStringFunc(input, func(match string) string {
|
||||
return re.ReplaceAllStringFunc(input, func(match string) string {
|
||||
parts := re.FindStringSubmatch(match)
|
||||
if len(parts) < 2 {
|
||||
// technically shouldn't happen
|
||||
return match
|
||||
}
|
||||
|
||||
// extract the variable name
|
||||
variableName := parts[1]
|
||||
if value, found := os.LookupEnv(variableName); found {
|
||||
return value
|
||||
}
|
||||
if parts[2] != "" {
|
||||
return parts[3]
|
||||
}
|
||||
err = fmt.Errorf("environment variable not found: %q", variableName)
|
||||
return ""
|
||||
return match
|
||||
})
|
||||
return output, err
|
||||
}
|
||||
|
||||
// parseToolsFile parses the provided yaml into appropriate configs.
|
||||
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
|
||||
var toolsFile ToolsFile
|
||||
// Replace environment variables if found
|
||||
output, err := parseEnv(string(raw))
|
||||
if err != nil {
|
||||
return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
|
||||
}
|
||||
raw = []byte(output)
|
||||
|
||||
raw = []byte(parseEnv(string(raw)))
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
|
||||
err := yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
|
||||
if err != nil {
|
||||
return toolsFile, err
|
||||
}
|
||||
@@ -587,14 +489,13 @@ func watchChanges(ctx context.Context, watchDirs map[string]bool, watchedFiles m
|
||||
return
|
||||
}
|
||||
|
||||
// only check for events which indicate user saved a new tools file
|
||||
// multiple operations checked due to various file update methods across editors
|
||||
if !e.Has(fsnotify.Write | fsnotify.Create | fsnotify.Rename) {
|
||||
// only check for write events which indicate user saved a new tools file
|
||||
if !e.Has(fsnotify.Write) {
|
||||
continue
|
||||
}
|
||||
|
||||
cleanedFilename := filepath.Clean(e.Name)
|
||||
logger.DebugContext(ctx, fmt.Sprintf("%s event detected in %s", e.Op, cleanedFilename))
|
||||
logger.DebugContext(ctx, fmt.Sprintf("WRITE event detected in %s", cleanedFilename))
|
||||
|
||||
folderChanged := watchingFolder &&
|
||||
(strings.HasSuffix(cleanedFilename, ".yaml") || strings.HasSuffix(cleanedFilename, ".yml"))
|
||||
@@ -824,6 +725,11 @@ func run(cmd *Command) error {
|
||||
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
|
||||
cmd.cfg.AuthServiceConfigs = authSourceConfigs
|
||||
}
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||
if err != nil {
|
||||
@@ -861,7 +767,7 @@ func run(cmd *Command) error {
|
||||
}
|
||||
cmd.logger.InfoContext(ctx, "Server ready to serve!")
|
||||
if cmd.cfg.UI {
|
||||
cmd.logger.InfoContext(ctx, fmt.Sprintf("Toolbox UI is up and running at: http://%s:%d/ui", cmd.cfg.Address, cmd.cfg.Port))
|
||||
cmd.logger.InfoContext(ctx, "Toolbox UI is up and running at: http://localhost:5000/ui")
|
||||
}
|
||||
|
||||
go func() {
|
||||
|
||||
295
cmd/root_test.go
@@ -206,72 +206,6 @@ func TestServerConfigFlags(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseEnv(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
env map[string]string
|
||||
in string
|
||||
want string
|
||||
err bool
|
||||
errString string
|
||||
}{
|
||||
{
|
||||
desc: "without default without env",
|
||||
in: "${FOO}",
|
||||
want: "",
|
||||
err: true,
|
||||
errString: `environment variable not found: "FOO"`,
|
||||
},
|
||||
{
|
||||
desc: "without default with env",
|
||||
env: map[string]string{
|
||||
"FOO": "bar",
|
||||
},
|
||||
in: "${FOO}",
|
||||
want: "bar",
|
||||
},
|
||||
{
|
||||
desc: "with empty default",
|
||||
in: "${FOO:}",
|
||||
want: "",
|
||||
},
|
||||
{
|
||||
desc: "with default",
|
||||
in: "${FOO:bar}",
|
||||
want: "bar",
|
||||
},
|
||||
{
|
||||
desc: "with default with env",
|
||||
env: map[string]string{
|
||||
"FOO": "hello",
|
||||
},
|
||||
in: "${FOO:bar}",
|
||||
want: "hello",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
if tc.env != nil {
|
||||
for k, v := range tc.env {
|
||||
t.Setenv(k, v)
|
||||
}
|
||||
}
|
||||
got, err := parseEnv(tc.in)
|
||||
if tc.err {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error not found")
|
||||
}
|
||||
if tc.errString != err.Error() {
|
||||
t.Fatalf("incorrect error string: got %s, want %s", err, tc.errString)
|
||||
}
|
||||
}
|
||||
if tc.want != got {
|
||||
t.Fatalf("unexpected want: got %s, want %s", got, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolFileFlag(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
@@ -886,14 +820,13 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
||||
|
||||
func TestEnvVarReplacement(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
t.Setenv("TestHeader", "ACTUAL_HEADER")
|
||||
t.Setenv("API_KEY", "ACTUAL_API_KEY")
|
||||
t.Setenv("clientId", "ACTUAL_CLIENT_ID")
|
||||
t.Setenv("clientId2", "ACTUAL_CLIENT_ID_2")
|
||||
t.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME")
|
||||
t.Setenv("cat_string", "cat")
|
||||
t.Setenv("food_string", "food")
|
||||
t.Setenv("TestHeader", "ACTUAL_HEADER")
|
||||
os.Setenv("TestHeader", "ACTUAL_HEADER")
|
||||
os.Setenv("API_KEY", "ACTUAL_API_KEY")
|
||||
os.Setenv("clientId", "ACTUAL_CLIENT_ID")
|
||||
os.Setenv("clientId2", "ACTUAL_CLIENT_ID_2")
|
||||
os.Setenv("toolset_name", "ACTUAL_TOOLSET_NAME")
|
||||
os.Setenv("cat_string", "cat")
|
||||
os.Setenv("food_string", "food")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
@@ -1219,8 +1152,7 @@ func TestSingleEdit(t *testing.T) {
|
||||
t.Fatalf("error writing to file: %v", err)
|
||||
}
|
||||
|
||||
// only check substring of DEBUG message due to some OS/editors firing different operations
|
||||
detectedFileChange := regexp.MustCompile(fmt.Sprintf(`event detected in %s"`, regexEscapedPathFile))
|
||||
detectedFileChange := regexp.MustCompile(fmt.Sprintf(`DEBUG "WRITE event detected in %s"`, regexEscapedPathFile))
|
||||
_, err = testutils.WaitForString(ctx, detectedFileChange, pr)
|
||||
if err != nil {
|
||||
t.Fatalf("timeout or error waiting for file to detect write: %s", err)
|
||||
@@ -1228,105 +1160,14 @@ func TestSingleEdit(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPrebuiltTools(t *testing.T) {
|
||||
// Get prebuilt configs
|
||||
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
|
||||
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
||||
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
||||
clickhouse_config, _ := prebuiltconfigs.Get("clickhouse")
|
||||
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
|
||||
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
|
||||
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
|
||||
dataplex_config, _ := prebuiltconfigs.Get("dataplex")
|
||||
firestoreconfig, _ := prebuiltconfigs.Get("firestore")
|
||||
mysql_config, _ := prebuiltconfigs.Get("mysql")
|
||||
mssql_config, _ := prebuiltconfigs.Get("mssql")
|
||||
looker_config, _ := prebuiltconfigs.Get("looker")
|
||||
postgresconfig, _ := prebuiltconfigs.Get("postgres")
|
||||
spanner_config, _ := prebuiltconfigs.Get("spanner")
|
||||
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
|
||||
sqlite_config, _ := prebuiltconfigs.Get("sqlite")
|
||||
neo4jconfig, _ := prebuiltconfigs.Get("neo4j")
|
||||
|
||||
// Set environment variables
|
||||
t.Setenv("API_KEY", "your_api_key")
|
||||
|
||||
t.Setenv("BIGQUERY_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("DATAPLEX_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("FIRESTORE_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("FIRESTORE_DATABASE", "your_firestore_db_name")
|
||||
|
||||
t.Setenv("SPANNER_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("SPANNER_INSTANCE", "your_spanner_instance")
|
||||
t.Setenv("SPANNER_DATABASE", "your_spanner_db")
|
||||
|
||||
t.Setenv("ALLOYDB_POSTGRES_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("ALLOYDB_POSTGRES_REGION", "your_gcp_region")
|
||||
t.Setenv("ALLOYDB_POSTGRES_CLUSTER", "your_alloydb_cluster")
|
||||
t.Setenv("ALLOYDB_POSTGRES_INSTANCE", "your_alloydb_instance")
|
||||
t.Setenv("ALLOYDB_POSTGRES_DATABASE", "your_alloydb_db")
|
||||
t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user")
|
||||
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password")
|
||||
|
||||
t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol")
|
||||
t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database")
|
||||
t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password")
|
||||
t.Setenv("CLICKHOUSE_USER", "your_clickhouse_user")
|
||||
t.Setenv("CLICKHOUSE_HOST", "your_clickhosue_host")
|
||||
t.Setenv("CLICKHOUSE_PORT", "8123")
|
||||
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_PROJECT", "your_pg_project")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_INSTANCE", "your_pg_instance")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_DATABASE", "your_pg_db")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_REGION", "your_pg_region")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_USER", "your_pg_user")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_PASS", "your_pg_pass")
|
||||
|
||||
t.Setenv("CLOUD_SQL_MYSQL_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("CLOUD_SQL_MYSQL_REGION", "your_gcp_region")
|
||||
t.Setenv("CLOUD_SQL_MYSQL_INSTANCE", "your_instance")
|
||||
t.Setenv("CLOUD_SQL_MYSQL_DATABASE", "your_cloudsql_mysql_db")
|
||||
t.Setenv("CLOUD_SQL_MYSQL_USER", "your_cloudsql_mysql_user")
|
||||
t.Setenv("CLOUD_SQL_MYSQL_PASSWORD", "your_cloudsql_mysql_password")
|
||||
|
||||
t.Setenv("CLOUD_SQL_MSSQL_PROJECT", "your_gcp_project_id")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_REGION", "your_gcp_region")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_INSTANCE", "your_cloudsql_mssql_instance")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_DATABASE", "your_cloudsql_mssql_db")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_IP_ADDRESS", "127.0.0.1")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_USER", "your_cloudsql_mssql_user")
|
||||
t.Setenv("CLOUD_SQL_MSSQL_PASSWORD", "your_cloudsql_mssql_password")
|
||||
t.Setenv("CLOUD_SQL_POSTGRES_PASSWORD", "your_cloudsql_pg_password")
|
||||
|
||||
t.Setenv("POSTGRES_HOST", "localhost")
|
||||
t.Setenv("POSTGRES_PORT", "5432")
|
||||
t.Setenv("POSTGRES_DATABASE", "your_postgres_db")
|
||||
t.Setenv("POSTGRES_USER", "your_postgres_user")
|
||||
t.Setenv("POSTGRES_PASSWORD", "your_postgres_password")
|
||||
|
||||
t.Setenv("MYSQL_HOST", "localhost")
|
||||
t.Setenv("MYSQL_PORT", "3306")
|
||||
t.Setenv("MYSQL_DATABASE", "your_mysql_db")
|
||||
t.Setenv("MYSQL_USER", "your_mysql_user")
|
||||
t.Setenv("MYSQL_PASSWORD", "your_mysql_password")
|
||||
|
||||
t.Setenv("MSSQL_HOST", "localhost")
|
||||
t.Setenv("MSSQL_PORT", "1433")
|
||||
t.Setenv("MSSQL_DATABASE", "your_mssql_db")
|
||||
t.Setenv("MSSQL_USER", "your_mssql_user")
|
||||
t.Setenv("MSSQL_PASSWORD", "your_mssql_password")
|
||||
|
||||
t.Setenv("LOOKER_BASE_URL", "https://your_company.looker.com")
|
||||
t.Setenv("LOOKER_CLIENT_ID", "your_looker_client_id")
|
||||
t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
|
||||
t.Setenv("LOOKER_VERIFY_SSL", "true")
|
||||
|
||||
t.Setenv("SQLITE_DATABASE", "test.db")
|
||||
|
||||
t.Setenv("NEO4J_URI", "bolt://localhost:7687")
|
||||
t.Setenv("NEO4J_DATABASE", "neo4j")
|
||||
t.Setenv("NEO4J_USERNAME", "your_neo4j_user")
|
||||
t.Setenv("NEO4J_PASSWORD", "your_neo4j_password")
|
||||
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
@@ -1336,22 +1177,12 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
in []byte
|
||||
wantToolset server.ToolsetConfigs
|
||||
}{
|
||||
{
|
||||
name: "alloydb postgres admin prebuilt tools",
|
||||
in: alloydb_admin_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_admin_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_admin_tools",
|
||||
ToolNames: []string{"create_cluster", "wait_for_operation", "create_instance", "list_clusters", "list_instances", "list_users", "create_user", "get_cluster"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "alloydb prebuilt tools",
|
||||
in: alloydb_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_database_tools",
|
||||
"alloydb-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "alloydb-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1360,19 +1191,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "bigquery prebuilt tools",
|
||||
in: bigquery_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"bigquery_database_tools": tools.ToolsetConfig{
|
||||
Name: "bigquery_database_tools",
|
||||
ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "clickhouse prebuilt tools",
|
||||
in: clickhouse_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"clickhouse_database_tools": tools.ToolsetConfig{
|
||||
Name: "clickhouse_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_databases"},
|
||||
"bigquery-database-tools": tools.ToolsetConfig{
|
||||
Name: "bigquery-database-tools",
|
||||
ToolNames: []string{"execute_sql", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1380,8 +1201,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlpg prebuilt tools",
|
||||
in: cloudsqlpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_database_tools",
|
||||
"cloud-sql-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1390,8 +1211,8 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlmysql prebuilt tools",
|
||||
in: cloudsqlmysql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mysql_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mysql_database_tools",
|
||||
"cloud-sql-mysql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mysql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1400,68 +1221,18 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "cloudsqlmssql prebuilt tools",
|
||||
in: cloudsqlmssql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mssql_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mssql_database_tools",
|
||||
"cloud-sql-mssql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mssql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "dataplex prebuilt tools",
|
||||
in: dataplex_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"dataplex_tools": tools.ToolsetConfig{
|
||||
Name: "dataplex_tools",
|
||||
ToolNames: []string{"search_entries", "lookup_entry", "search_aspect_types"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "firestore prebuilt tools",
|
||||
in: firestoreconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"firestore_database_tools": tools.ToolsetConfig{
|
||||
Name: "firestore_database_tools",
|
||||
ToolNames: []string{"get_documents", "add_documents", "update_document", "list_collections", "delete_documents", "query_collection", "get_rules", "validate_rules"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "mysql prebuilt tools",
|
||||
in: mysql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"mysql_database_tools": tools.ToolsetConfig{
|
||||
Name: "mysql_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "mssql prebuilt tools",
|
||||
in: mssql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"mssql_database_tools": tools.ToolsetConfig{
|
||||
Name: "mssql_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "looker prebuilt tools",
|
||||
in: looker_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"looker_tools": tools.ToolsetConfig{
|
||||
Name: "looker_tools",
|
||||
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "postgres prebuilt tools",
|
||||
in: postgresconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "postgres_database_tools",
|
||||
"postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
@@ -1480,32 +1251,12 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
name: "spanner pg prebuilt tools",
|
||||
in: spannerpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"spanner_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "spanner_postgres_database_tools",
|
||||
"spanner-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "spanner-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sqlite prebuilt tools",
|
||||
in: sqlite_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"sqlite_database_tools": tools.ToolsetConfig{
|
||||
Name: "sqlite_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "neo4j prebuilt tools",
|
||||
in: neo4jconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"neo4j_database_tools": tools.ToolsetConfig{
|
||||
Name: "neo4j_database_tools",
|
||||
ToolNames: []string{"execute_cypher", "get_schema"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
|
||||
@@ -1 +1 @@
0.14.0
0.8.0

@@ -4,12 +4,12 @@ type: docs
notoc: false
weight: 1
description: >
  All of Toolbox's documentation.
  All of Toolbox's documentation.
---

<html>
  <head>
    <link rel="canonical" href="getting-started/introduction/"/>
    <meta http-equiv="refresh" content="0;url=getting-started/introduction/"/>
    <meta http-equiv="refresh" content="0;url=getting-started/introduction"/>
  </head>
</html>

@@ -7,11 +7,11 @@ description: Frequently asked questions about Toolbox.

## How can I deploy or run Toolbox?

MCP Toolbox for Databases is open-source and can be run or deployed to a
MCP Toolbox for Databases is open-source and can be ran or deployed to a
multitude of environments. For convenience, we release [compiled binaries and
docker images][release-notes] (but you can always compile yourself as well!).

For detailed instructions, check out these resources:
For detailed instructions, check our these resources:

- [Quickstart: How to Run Locally](../getting-started/local_quickstart.md)
- [Deploy to Cloud Run](../how-to/deploy_toolbox.md)

@@ -3,7 +3,7 @@ title: "Telemetry"
type: docs
weight: 2
description: >
  An overview of telemetry and observability in Toolbox.
  An overview of telemetry and observability in Toolbox.
---

## About
@@ -158,7 +158,7 @@ enabled:

- [Cloud Logging API](https://cloud.google.com/logging/docs/api/enable-api)
- [Cloud Monitoring API](https://cloud.google.com/monitoring/api/enable-api)
- [Cloud Trace API](https://console.cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
- [Cloud Trace API](https://cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
{{< /notice >}}

#### OTLP Exporter
@@ -177,7 +177,7 @@ It receives telemetry data, transforms it, and then exports data to backends
that can store it permanently. Toolbox provides an option to export telemetry
data to the user's choice of backend(s) that are compatible with the OpenTelemetry
Protocol (OTLP). If you would like to use a collector, please refer to
[Export Telemetry using the Otel Collector](../../how-to/export_telemetry.md).
[Export Telemetry using the Otel Collector](../how-to/export_telemetry.md).

### Flags

@@ -190,18 +190,6 @@
|
||||
"!sudo lsof -i :5432"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Optional: Enable Vertex AI API for Google Cloud\n",
|
||||
"\n",
|
||||
"If you're using a model hosted on **Vertex AI**, run the following command to enable the API:\n",
|
||||
"\n",
|
||||
"```bash\n",
|
||||
"!gcloud services enable aiplatform.googleapis.com\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
@@ -234,7 +222,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.14.0\" # x-release-please-version\n",
|
||||
"version = \"0.8.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -1,7 +1,7 @@
---
title: "Configuration"
type: docs
weight: 6
weight: 4
description: >
  How to configure Toolbox's tools.yaml file.
---
@@ -22,11 +22,6 @@ etc., you could use environment variables instead with the format `${ENV_NAME}`.
    user: ${USER_NAME}
    password: ${PASSWORD}
```
A default value can be specified like `${ENV_NAME:default}`.

```yaml
    port: ${DB_PORT:3306}
```

### Sources

@@ -50,7 +45,7 @@ For more details on configuring different types of sources, see the

### Tools

The `tools` section of your `tools.yaml` defines the actions your agent can
The `tools` section of your `tools.yaml` define your the actions your agent can
take: what kind of tool it is, which source(s) it affects, what parameters it
uses, etc.

@@ -81,7 +76,7 @@ toolsets:
  my_first_toolset:
    - my_first_tool
    - my_second_tool
  my_second_toolset:
  my_second_toolset:
    - my_second_tool
    - my_third_tool
```
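
Once a toolset such as `my_first_toolset` is defined in `tools.yaml`, a client can load just that group of tools by name. The following is a minimal sketch using the `toolbox_core` Python SDK shown later in this document; the server URL and toolset name are placeholders for your own deployment.

```python
import asyncio

from toolbox_core import ToolboxClient


async def main():
    # Point the client at your running Toolbox server.
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        # Load only the tools grouped under "my_first_toolset" in tools.yaml.
        tools = await client.load_toolset("my_first_toolset")
        print(f"Loaded {len(tools)} tools from my_first_toolset")


asyncio.run(main())
```
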
@@ -86,7 +86,7 @@ To install Toolbox as a binary:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.14.0
|
||||
export VERSION=0.8.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -97,17 +97,10 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.14.0
|
||||
export VERSION=0.8.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Homebrew" lang="en" %}}
|
||||
To install Toolbox using Homebrew on macOS or Linux:
|
||||
|
||||
```sh
|
||||
brew install mcp-toolbox
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Compile from source" lang="en" %}}
|
||||
|
||||
@@ -115,7 +108,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.14.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
@@ -130,34 +123,15 @@ execute `toolbox` to start the server:
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the
|
||||
`--disable-reload` flag.
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
#### Launching Toolbox UI
|
||||
|
||||
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets
|
||||
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
|
||||
|
||||
```sh
|
||||
./toolbox --ui
|
||||
```
|
||||
|
||||
#### Homebrew Users
|
||||
|
||||
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:
|
||||
|
||||
```sh
|
||||
toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
|
||||
For more detailed documentation on deploying to different environments, check
|
||||
out the resources in the [How-to section](../../how-to/)
|
||||
out the resources in the [How-to section](../../how-to/_index.md)
|
||||
|
||||
### Integrating your application
|
||||
|
||||
@@ -165,7 +139,6 @@ Once your server is up and running, you can load the tools into your
|
||||
application. See below the list of Client SDKs for using various frameworks:
|
||||
|
||||
#### Python
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
@@ -178,7 +151,7 @@ from toolbox_core import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = await client.load_toolset("toolset_name")
|
||||
@@ -199,7 +172,7 @@ from toolbox_langchain import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = client.load_toolset()
|
||||
@@ -220,7 +193,7 @@ from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
|
||||
# these tools can be passed to your application
|
||||
|
||||
@@ -330,7 +303,7 @@ const toolboxTools = await client.loadToolset('toolsetName');
|
||||
const getTool = (toolboxTool) => tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
parameters: toolboxTool.getParams(),
|
||||
execute: toolboxTool
|
||||
});
|
||||
|
||||
@@ -343,255 +316,4 @@ const tools = toolboxTools.map(getTool);
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
#### Go
|
||||
|
||||
Once you've installed the [Toolbox Go
|
||||
SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core), you can load
|
||||
tools:
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tools
|
||||
tools, err := client.LoadToolset("toolsetName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LangChain Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Use this tool with LangChainGo
|
||||
langChainTool := llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Genkit Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
|
||||
"github.com/invopop/jsonschema"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
g, err := genkit.Init(ctx)
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Convert the tool using the tbgenkit package
|
||||
// Use this tool with Genkit Go
|
||||
genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to convert tool: %v\n", err)
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Go GenAI" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var schema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: schema,
|
||||
}
|
||||
|
||||
// Use this tool with Go GenAI
|
||||
genAITool := &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="OpenAI Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var paramsSchema openai.FunctionParameters
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Use this tool with OpenAI Go
|
||||
openAITool := openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: tool.Name(),
|
||||
Description: openai.String(tool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
|
||||
|
||||
For end-to-end samples on using the Toolbox Go SDK with orchestration
|
||||
frameworks, see the [project's
|
||||
samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
(binary image changed: 76 KiB before, 154 KiB after)
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: "Python Quickstart (Local)"
|
||||
title: "Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox locally with [Python](https://github.com/googleapis/mcp-toolbox-sdk-python), PostgreSQL, and [Agent Development Kit](https://google.github.io/adk-docs/),
|
||||
How to get started running Toolbox locally with Python, PostgreSQL, and [Agent Development Kit](https://google.github.io/adk-docs/),
|
||||
[LangGraph](https://www.langchain.com/langgraph), [LlamaIndex](https://www.llamaindex.ai/) or [GoogleGenAI](https://pypi.org/project/google-genai/).
|
||||
---
|
||||
|
||||
@@ -14,22 +14,252 @@ description: >
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Python 3.9+][install-python] (including [pip][install-pip] and
|
||||
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv]).
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv])
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres]
|
||||
|
||||
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
|
||||
[install-pip]: https://pip.pypa.io/en/stable/installation/
|
||||
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
|
||||
|
||||
## Step 1: Set up your database
|
||||
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
|
||||
|
||||
In this section, we will create a database, insert some data that needs to be
|
||||
accessed by our agent, and create a database user for Toolbox to connect with.
|
||||
|
||||
1. Connect to postgres using the `psql` command:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U postgres
|
||||
```
|
||||
|
||||
Here, `postgres` denotes the default postgres superuser.
|
||||
|
||||
{{< notice info >}}
|
||||
|
||||
#### **Having trouble connecting?**
|
||||
|
||||
* **Password Prompt:** If you are prompted for a password for the `postgres`
|
||||
user and do not know it (or a blank password doesn't work), your PostgreSQL
|
||||
installation might require a password or a different authentication method.
|
||||
* **`FATAL: role "postgres" does not exist`:** This error means the default
|
||||
`postgres` superuser role isn't available under that name on your system.
|
||||
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
|
||||
You can typically check with `sudo systemctl status postgresql` and start it
|
||||
with `sudo systemctl start postgresql` on Linux systems.
|
||||
|
||||
<br/>
|
||||
|
||||
#### **Common Solution**
|
||||
|
||||
For password issues or if the `postgres` role seems inaccessible directly, try
|
||||
switching to the `postgres` operating system user first. This user often has
|
||||
permission to connect without a password for local connections (this is called
|
||||
peer authentication).
|
||||
|
||||
```bash
|
||||
sudo -i -u postgres
|
||||
psql -h 127.0.0.1
|
||||
```
|
||||
|
||||
Once you are in the `psql` shell using this method, you can proceed with the
|
||||
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
|
||||
`exit` to return to your normal user shell.
|
||||
|
||||
If desired, once connected to `psql` as the `postgres` OS user, you can set a
|
||||
password for the `postgres` *database* user using: `ALTER USER postgres WITH
|
||||
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
|
||||
postgres` and a password next time.
|
||||
{{< /notice >}}
|
||||
|
||||
1. Create a new database and a new user:
|
||||
|
||||
{{< notice tip >}}
|
||||
For a real application, it's best to follow the principle of least permission
|
||||
and only grant the privileges your application needs.
|
||||
{{< /notice >}}
|
||||
|
||||
```sql
|
||||
CREATE USER toolbox_user WITH PASSWORD 'my-password';
|
||||
|
||||
CREATE DATABASE toolbox_db;
|
||||
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
|
||||
|
||||
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
|
||||
need to type `exit` after `\q` to leave the `postgres` user's shell
|
||||
session.)
|
||||
|
||||
1. Connect to your database with your new user:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
|
||||
```
|
||||
|
||||
1. Create a table using the following command:
|
||||
|
||||
```sql
|
||||
CREATE TABLE hotels(
|
||||
id INTEGER NOT NULL PRIMARY KEY,
|
||||
name VARCHAR NOT NULL,
|
||||
location VARCHAR NOT NULL,
|
||||
price_tier VARCHAR NOT NULL,
|
||||
checkin_date DATE NOT NULL,
|
||||
checkout_date DATE NOT NULL,
|
||||
booked BIT NOT NULL
|
||||
);
|
||||
```
|
||||
|
||||
1. Insert data into the table.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
|
||||
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
|
||||
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
|
||||
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
|
||||
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
|
||||
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
|
||||
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
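
Optionally, you can verify the seed data from a script instead of `psql`. This is only a sketch under an extra assumption: it uses the `psycopg2-binary` package, which is not installed as part of this guide.

```python
# Sketch only: assumes `pip install psycopg2-binary`, which this guide does not cover.
import psycopg2

conn = psycopg2.connect(
    host="127.0.0.1",
    dbname="toolbox_db",
    user="toolbox_user",
    password="my-password",
)
with conn, conn.cursor() as cur:
    # Count the rows inserted above; this should print 10.
    cur.execute("SELECT count(*) FROM hotels;")
    print(cur.fetchone()[0])
conn.close()
```
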
## Step 2: Install and configure Toolbox
|
||||
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
|
||||
|
||||
In this section, we will download Toolbox, configure our tools in a
|
||||
`tools.yaml`, and then run the Toolbox server.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Write the following into a `tools.yaml` file. Be sure to update any fields
|
||||
such as `user`, `password`, or `database` that you may have customized in the
|
||||
previous step.
|
||||
|
||||
{{< notice tip >}}
|
||||
In practice, use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-pg-source:
|
||||
kind: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
tools:
|
||||
search-hotels-by-name:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
search-hotels-by-location:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
book-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
update-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
cancel-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
toolsets:
|
||||
my-toolset:
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
For more info on tools, check out the `Resources` section of the docs.
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
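
Before wiring up an agent, you can optionally confirm that the server is reachable and that `my-toolset` resolves. This short check is a sketch that assumes the `toolbox-core` Python package (installed in the next step) is already available.

```python
from toolbox_core import ToolboxSyncClient

# Quick sanity check: connect to the local Toolbox server and load the toolset.
with ToolboxSyncClient("http://127.0.0.1:5000") as client:
    tools = client.load_toolset("my-toolset")
    print(f"Loaded {len(tools)} tools")
```
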
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
@@ -99,28 +329,310 @@ pip install google-genai
|
||||
code to create an agent:
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="ADK" lang="python" >}}
|
||||
from google.adk.agents import Agent
|
||||
from google.adk.runners import Runner
|
||||
from google.adk.sessions import InMemorySessionService
|
||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||
from google.genai import types
|
||||
from toolbox_core import ToolboxSyncClient
|
||||
|
||||
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
# TODO(developer): replace this with your Google API key
|
||||
|
||||
os.environ['GOOGLE_API_KEY'] = 'your-api-key'
|
||||
|
||||
async def main():
|
||||
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
root_agent = Agent(
|
||||
model='gemini-2.0-flash-001',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = await session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
|
||||
for text in responses:
|
||||
print(text)
|
||||
|
||||
asyncio.run(main())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
{{< include "quickstart/python/langchain/quickstart.py" >}}
|
||||
from langgraph.prebuilt import create_react_agent
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from langchain_google_vertexai import ChatVertexAI
|
||||
|
||||
# from langchain_google_genai import ChatGoogleGenerativeAI
|
||||
|
||||
# from langchain_anthropic import ChatAnthropic
|
||||
|
||||
from langgraph.checkpoint.memory import MemorySaver
|
||||
|
||||
from toolbox_langchain import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
|
||||
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
|
||||
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
|
||||
|
||||
config = {"configurable": {"thread_id": "thread-1"}}
|
||||
for query in queries:
|
||||
inputs = {"messages": [("user", prompt + query)]}
|
||||
response = agent.invoke(inputs, stream_mode="values", config=config)
|
||||
print(response["messages"][-1].content)
|
||||
|
||||
asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="python" >}}
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
{{< include "quickstart/python/llamaindex/quickstart.py" >}}
|
||||
from llama_index.core.agent.workflow import AgentWorkflow
|
||||
|
||||
from llama_index.core.workflow import Context
|
||||
|
||||
# TODO(developer): replace this with another import if needed
|
||||
|
||||
from llama_index.llms.google_genai import GoogleGenAI
|
||||
|
||||
# from llama_index.llms.anthropic import Anthropic
|
||||
|
||||
from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
# TODO(developer): replace this with another model if needed
|
||||
llm = GoogleGenAI(
|
||||
model="gemini-2.0-flash-001",
|
||||
vertexai_config={"project": "project-id", "location": "us-central1"},
|
||||
)
|
||||
# llm = GoogleGenAI(
|
||||
# api_key=os.getenv("GOOGLE_API_KEY"),
|
||||
# model="gemini-2.0-flash-001",
|
||||
# )
|
||||
# llm = Anthropic(
|
||||
# model="claude-3-7-sonnet-latest",
|
||||
# api_key=os.getenv("ANTHROPIC_API_KEY")
|
||||
# )
|
||||
|
||||
# Load the tools from the Toolbox server
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
tools = await client.aload_toolset()
|
||||
|
||||
agent = AgentWorkflow.from_tools_or_functions(
|
||||
tools,
|
||||
llm=llm,
|
||||
system_prompt=prompt,
|
||||
)
|
||||
ctx = Context(agent)
|
||||
for query in queries:
|
||||
response = await agent.run(user_msg=query, ctx=ctx)
|
||||
print(f"---- {query} ----")
|
||||
print(str(response))
|
||||
|
||||
asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="python" >}}
|
||||
import asyncio
|
||||
|
||||
{{< include "quickstart/python/core/quickstart.py" >}}
|
||||
from google import genai
|
||||
from google.genai.types import (
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
Part,
|
||||
Tool,
|
||||
)
|
||||
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in it's name.",
|
||||
"Please book the hotel Hilton Basel for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Hyatt Regency for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def run_application():
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
|
||||
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
|
||||
# integration. While this example uses Google's genai client, these callables can be adapted for
|
||||
# various function-calling or agent frameworks. For easier integration with supported frameworks
|
||||
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
|
||||
# provided wrapper packages, which handle framework-specific boilerplate.
|
||||
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
|
||||
genai_client = genai.Client(
|
||||
vertexai=True, project="project-id", location="us-central1"
|
||||
)
|
||||
|
||||
genai_tools = [
|
||||
Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration.from_callable_with_api_option(callable=tool)
|
||||
]
|
||||
)
|
||||
for tool in toolbox_tools
|
||||
]
|
||||
history = []
|
||||
for query in queries:
|
||||
user_prompt_content = Content(
|
||||
role="user",
|
||||
parts=[Part.from_text(text=query)],
|
||||
)
|
||||
history.append(user_prompt_content)
|
||||
|
||||
response = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
system_instruction=prompt,
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
history.append(response.candidates[0].content)
|
||||
function_response_parts = []
|
||||
for function_call in response.function_calls:
|
||||
fn_name = function_call.name
|
||||
# The tools are sorted alphabetically
|
||||
if fn_name == "search-hotels-by-name":
|
||||
function_result = await toolbox_tools[3](**function_call.args)
|
||||
elif fn_name == "search-hotels-by-location":
|
||||
function_result = await toolbox_tools[2](**function_call.args)
|
||||
elif fn_name == "book-hotel":
|
||||
function_result = await toolbox_tools[0](**function_call.args)
|
||||
elif fn_name == "update-hotel":
|
||||
function_result = await toolbox_tools[4](**function_call.args)
|
||||
elif fn_name == "cancel-hotel":
|
||||
function_result = await toolbox_tools[1](**function_call.args)
|
||||
else:
|
||||
raise ValueError("Function name not present.")
|
||||
function_response = {"result": function_result}
|
||||
function_response_part = Part.from_function_response(
|
||||
name=function_call.name,
|
||||
response=function_response,
|
||||
)
|
||||
function_response_parts.append(function_response_part)
|
||||
|
||||
if function_response_parts:
|
||||
tool_response_content = Content(role="tool", parts=function_response_parts)
|
||||
history.append(tool_response_content)
|
||||
|
||||
response2 = genai_client.models.generate_content(
|
||||
model="gemini-2.0-flash-001",
|
||||
contents=history,
|
||||
config=GenerateContentConfig(
|
||||
tools=genai_tools,
|
||||
),
|
||||
)
|
||||
final_model_response_content = response2.candidates[0].content
|
||||
history.append(final_model_response_content)
|
||||
print(response2.text)
|
||||
|
||||
asyncio.run(run_application())
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
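
One note on the Core example above: tool results are dispatched by position in `toolbox_tools` (`toolbox_tools[0]`, `toolbox_tools[3]`, and so on), which breaks silently if the toolset contents or ordering change. A more robust variant, sketched here under the same setup as that example, keys the loaded tools by name; it assumes each loaded tool callable exposes its tool name via `__name__`, as implied by the `FunctionDeclaration.from_callable_with_api_option` usage above.

```python
# Build a name -> tool map once, then dispatch each function call by name.
tools_by_name = {tool.__name__: tool for tool in toolbox_tools}

async def dispatch(function_call):
    tool = tools_by_name.get(function_call.name)
    if tool is None:
        raise ValueError(f"Unknown tool: {function_call.name}")
    return await tool(**function_call.args)
```
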
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="ADK" lang="en" %}}
|
||||
To learn more about Agent Development Kit, check out the [ADK
|
||||
documentation.](https://google.github.io/adk-docs/)
|
||||
@@ -146,7 +658,3 @@ Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#man
|
||||
```sh
|
||||
python hotel_agent.py
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [Python SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-python).
|
||||
{{</ notice >}}
|
||||
|
||||
@@ -1,93 +0,0 @@
|
||||
---
|
||||
title: "Go Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 4
|
||||
description: >
|
||||
How to get started running Toolbox locally with [Go](https://github.com/googleapis/mcp-toolbox-sdk-go), PostgreSQL, and orchestration frameworks such as [LangChain Go](https://tmc.github.io/langchaingo/docs/), [GenkitGo](https://genkit.dev/go/docs/get-started-go/), [Go GenAI](https://github.com/googleapis/go-genai) and [OpenAI Go](https://github.com/openai/openai-go).
|
||||
---
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Go (v1.24.2 or higher)].
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
|
||||
[Go (v1.24.2 or higher)]: https://go.dev/doc/install
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
|
||||
|
||||
## Step 1: Set up your database
|
||||
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
In this section, we will write and run an agent that will load the Tools
|
||||
from Toolbox.
|
||||
|
||||
1. Initialize a go module:
|
||||
|
||||
```bash
|
||||
go mod init main
|
||||
```
|
||||
|
||||
1. In a new terminal, install the
|
||||
[SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go).
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
1. Create a new file named `hotelagent.go` and copy the following code to create
|
||||
an agent:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/langchain/quickstart.go" >}}
|
||||
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="Genkit Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/genkit/quickstart.go" >}}
|
||||
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="Go GenAI" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/genAI/quickstart.go" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="OpenAI Go" lang="go" >}}
|
||||
|
||||
{{< include "quickstart/go/openAI/quickstart.go" >}}
|
||||
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Ensure all dependencies are installed:
|
||||
|
||||
```sh
|
||||
go mod tidy
|
||||
```
|
||||
|
||||
1. Run your agent, and observe the results:
|
||||
|
||||
```sh
|
||||
go run hotelagent.go
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [Go SDK
|
||||
repo](https://github.com/googleapis/mcp-toolbox-sdk-go).
|
||||
{{</ notice >}}
|
||||
@@ -1,99 +0,0 @@
|
||||
---
|
||||
title: "JS Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-js), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/), [GenkitJS](https://genkit.dev/docs/get-started/), [LlamaIndex](https://ts.llamaindex.ai/) and [GoogleGenAI](https://github.com/googleapis/js-genai).
|
||||
---
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Node.js (v18 or higher)].
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
|
||||
[Node.js (v18 or higher)]: https://nodejs.org/
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
|
||||
|
||||
## Step 1: Set up your database
|
||||
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
In this section, we will write and run an agent that will load the Tools
|
||||
from Toolbox.
|
||||
|
||||
1. (Optional) Initialize a Node.js project:
|
||||
|
||||
```bash
|
||||
npm init -y
|
||||
```
|
||||
|
||||
1. In a new terminal, install the [SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/core
|
||||
```
|
||||
|
||||
1. Install other required dependencies
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="bash" >}}
|
||||
npm install langchain @langchain/google-genai
|
||||
{{< /tab >}}
|
||||
{{< tab header="GenkitJS" lang="bash" >}}
|
||||
npm install genkit @genkit-ai/googleai
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="bash" >}}
|
||||
npm install llamaindex @llamaindex/google @llamaindex/workflow
|
||||
{{< /tab >}}
|
||||
{{< tab header="GoogleGenAI" lang="bash" >}}
|
||||
npm install @google/genai
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Create a new file named `hotelAgent.js` and copy the following code to create an agent:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/langchain/quickstart.js" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GenkitJS" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/genkit/quickstart.js" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="LlamaIndex" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/llamaindex/quickstart.js" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GoogleGenAI" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/genAI/quickstart.js" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Run your agent, and observe the results:
|
||||
|
||||
```sh
|
||||
node hotelAgent.js
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [JS SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
|
||||
{{</ notice >}}
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: "Quickstart (MCP)"
|
||||
type: docs
|
||||
weight: 5
|
||||
weight: 3
|
||||
description: >
|
||||
How to get started running Toolbox locally with MCP Inspector.
|
||||
How to get started running Toolbox locally with MCP Inspector.
|
||||
---
|
||||
|
||||
## Overview
|
||||
@@ -71,7 +71,7 @@ access by our agent, and create a database user for Toolbox to connect with.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -200,7 +200,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
```
|
||||
|
||||
For more info on tools, check out the
|
||||
[Tools](../../resources/tools/) section.
|
||||
[Tools](../../resources/tools/_index.md) section.
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
@@ -218,27 +218,17 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
|
||||
1. It should show the following when the MCP Inspector is up and running (please
|
||||
take note of `<YOUR_SESSION_TOKEN>`):
|
||||
1. It should show the following when the MCP Inspector is up and running:
|
||||
|
||||
```bash
|
||||
Starting MCP inspector...
|
||||
⚙️ Proxy server listening on localhost:6277
|
||||
🔑 Session token: <YOUR_SESSION_TOKEN>
|
||||
Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth
|
||||
|
||||
🚀 MCP Inspector is up and running at:
|
||||
http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
|
||||
🔍 MCP Inspector is up and running at http://127.0.0.1:5173 🚀
|
||||
```
|
||||
|
||||
1. Open the above link in your browser.
|
||||
|
||||
1. For `Transport Type`, select `Streamable HTTP`.
|
||||
1. For `Transport Type`, select `SSE`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure
|
||||
`<YOUR_SESSION_TOKEN>` is present.
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse`.
|
||||
|
||||
1. Click Connect.
|
||||
|
||||
|
||||
|
(binary image changed: 32 KiB before, 22 KiB after)
@@ -1,37 +0,0 @@
|
||||
module genai-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
google.golang.org/genai v1.24.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,174 +0,0 @@
package main

import (
    "context"
    "encoding/json"
    "fmt"
    "log"
    "os"

    "github.com/googleapis/mcp-toolbox-sdk-go/core"
    "google.golang.org/genai"
)

// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {

    inputschema, err := toolboxTool.InputSchema()
    if err != nil {
        return &genai.Tool{}
    }

    var paramsSchema *genai.Schema
    _ = json.Unmarshal(inputschema, &paramsSchema)
    // First, create the function declaration.
    funcDeclaration := &genai.FunctionDeclaration{
        Name:        toolboxTool.Name(),
        Description: toolboxTool.Description(),
        Parameters:  paramsSchema,
    }

    // Then, wrap the function declaration in a genai.Tool struct.
    return &genai.Tool{
        FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
    }
}

func printResponse(resp *genai.GenerateContentResponse) {
    for _, cand := range resp.Candidates {
        if cand.Content != nil {
            for _, part := range cand.Content.Parts {
                fmt.Println(part.Text)
            }
        }
    }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
    "Find hotels in Basel with Basel in its name.",
    "Can you book the hotel Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it.",
    "Please book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
    // Setup
    ctx := context.Background()
    apiKey := os.Getenv("GOOGLE_API_KEY")
    toolboxURL := "http://localhost:5000"

    // Initialize the Google GenAI client using the explicit ClientConfig.
    client, err := genai.NewClient(ctx, &genai.ClientConfig{
        APIKey: apiKey,
    })
    if err != nil {
        log.Fatalf("Failed to create Google GenAI client: %v", err)
    }

    // Initialize the MCP Toolbox client.
    toolboxClient, err := core.NewToolboxClient(toolboxURL)
    if err != nil {
        log.Fatalf("Failed to create Toolbox client: %v", err)
    }

    // Load the tool using the MCP Toolbox SDK.
    tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
    if err != nil {
        log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
    }

    genAITools := make([]*genai.Tool, len(tools))
    toolsMap := make(map[string]*core.ToolboxTool, len(tools))

    for i, tool := range tools {
        genAITools[i] = ConvertToGenaiTool(tool)
        toolsMap[tool.Name()] = tool
    }

    // Set up the generative model with the available tool.
    modelName := "gemini-2.0-flash"

    // Create the initial content prompt for the model.
    messageHistory := []*genai.Content{
        genai.NewContentFromText(systemPrompt, genai.RoleUser),
    }
    config := &genai.GenerateContentConfig{
        Tools: genAITools,
        ToolConfig: &genai.ToolConfig{
            FunctionCallingConfig: &genai.FunctionCallingConfig{
                Mode: genai.FunctionCallingConfigModeAny,
            },
        },
    }

    for _, query := range queries {

        messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))

        genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
        if err != nil {
            log.Fatalf("LLM call failed for query '%s': %v", query, err)
        }

        if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
            messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
        }

        functionCalls := genContentResp.FunctionCalls()

        toolResponseParts := []*genai.Part{}

        for _, fc := range functionCalls {

            toolToInvoke, found := toolsMap[fc.Name]
            if !found {
                log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
            }

            toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
            if invokeErr != nil {
                log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
            }

            // Enhanced Tool Result Handling (retained to prevent nil issues)
            toolResultString := ""
            if toolResult != nil {
                jsonBytes, marshalErr := json.Marshal(toolResult)
                if marshalErr == nil {
                    toolResultString = string(jsonBytes)
                } else {
                    toolResultString = fmt.Sprintf("%v", toolResult)
                }
            }

            responseMap := map[string]any{"result": toolResultString}

            toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
        }
        // Add all accumulated tool responses for this turn to the message history.
        toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
        messageHistory = append(messageHistory, toolResponseContent)

        finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
        if err != nil {
            log.Fatalf("Error calling GenerateContent (with function result): %v", err)
        }

        printResponse(finalResponse)
        // Add the final textual response from the LLM to the history
        if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
            messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
        }
    }
}
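A note on the Go GenAI sample above: `ConvertToGenaiTool` swallows schema errors and silently returns an empty `genai.Tool`. The sketch below is illustrative only and is not part of the compared commits; it reuses only the `core`, `genai`, `encoding/json`, and `fmt` APIs already imported by the sample, and the function name `convertToGenaiToolStrict` is hypothetical.

```go
// Sketch only: a converter that surfaces schema failures instead of
// returning an empty tool. Assumes the same imports as the sample above.
func convertToGenaiToolStrict(t *core.ToolboxTool) (*genai.Tool, error) {
    schemaBytes, err := t.InputSchema()
    if err != nil {
        return nil, fmt.Errorf("reading input schema for %q: %w", t.Name(), err)
    }
    var params *genai.Schema
    if err := json.Unmarshal(schemaBytes, &params); err != nil {
        return nil, fmt.Errorf("decoding input schema for %q: %w", t.Name(), err)
    }
    return &genai.Tool{
        FunctionDeclarations: []*genai.FunctionDeclaration{{
            Name:        t.Name(),
            Description: t.Description(),
            Parameters:  params,
        }},
    }, nil
}
```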
@@ -1,52 +0,0 @@
module genkit-quickstart

go 1.24.6

require (
    github.com/firebase/genkit/go v0.6.2
    github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
)

require (
    cloud.google.com/go v0.121.1 // indirect
    cloud.google.com/go/auth v0.16.2 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
    cloud.google.com/go/compute/metadata v0.7.0 // indirect
    github.com/bahlo/generic-list-go v0.2.0 // indirect
    github.com/buger/jsonparser v1.1.1 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/goccy/go-yaml v1.17.1 // indirect
    github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca // indirect
    github.com/google/go-cmp v0.7.0 // indirect
    github.com/google/s2a-go v0.1.9 // indirect
    github.com/google/uuid v1.6.0 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
    github.com/googleapis/gax-go/v2 v2.14.2 // indirect
    github.com/gorilla/websocket v1.5.3 // indirect
    github.com/invopop/jsonschema v0.13.0 // indirect
    github.com/mailru/easyjson v0.9.0 // indirect
    github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a // indirect
    github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
    github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
    github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
    github.com/xeipuuv/gojsonschema v1.2.0 // indirect
    go.opentelemetry.io/auto/sdk v1.1.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
    go.opentelemetry.io/otel v1.36.0 // indirect
    go.opentelemetry.io/otel/metric v1.36.0 // indirect
    go.opentelemetry.io/otel/sdk v1.36.0 // indirect
    go.opentelemetry.io/otel/trace v1.36.0 // indirect
    golang.org/x/crypto v0.39.0 // indirect
    golang.org/x/net v0.41.0 // indirect
    golang.org/x/oauth2 v0.30.0 // indirect
    golang.org/x/sys v0.33.0 // indirect
    golang.org/x/text v0.26.0 // indirect
    google.golang.org/api v0.242.0 // indirect
    google.golang.org/genai v1.11.1 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
    google.golang.org/grpc v1.73.0 // indirect
    google.golang.org/protobuf v1.36.6 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
)
@@ -1,129 +0,0 @@
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/googleapis/mcp-toolbox-sdk-go/core"
    "github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"

    "github.com/firebase/genkit/go/ai"
    "github.com/firebase/genkit/go/genkit"
    "github.com/firebase/genkit/go/plugins/googlegenai"
)

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
    "Find hotels in Basel with Basel in its name.",
    "Can you book the hotel Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
    ctx := context.Background()

    // Create Toolbox Client
    toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
    if err != nil {
        log.Fatalf("Failed to create Toolbox client: %v", err)
    }

    // Load the tools using the MCP Toolbox SDK.
    tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
    if err != nil {
        log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
    }

    // Initialize Genkit
    g, err := genkit.Init(ctx,
        genkit.WithPlugins(&googlegenai.GoogleAI{}),
        genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
    )
    if err != nil {
        log.Fatalf("Failed to init genkit: %v\n", err)
    }

    // Create a conversation history
    conversationHistory := []*ai.Message{
        ai.NewSystemTextMessage(systemPrompt),
    }

    // Convert your tool to a Genkit tool.
    genkitTools := make([]ai.Tool, len(tools))
    for i, tool := range tools {
        newTool, err := tbgenkit.ToGenkitTool(tool, g)
        if err != nil {
            log.Fatalf("Failed to convert tool: %v\n", err)
        }
        genkitTools[i] = newTool
    }

    toolRefs := make([]ai.ToolRef, len(genkitTools))

    for i, tool := range genkitTools {
        toolRefs[i] = tool
    }

    for _, query := range queries {
        conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
        response, err := genkit.Generate(ctx, g,
            ai.WithMessages(conversationHistory...),
            ai.WithTools(toolRefs...),
            ai.WithReturnToolRequests(true),
        )

        if err != nil {
            log.Fatalf("%v\n", err)
        }
        conversationHistory = append(conversationHistory, response.Message)

        parts := []*ai.Part{}

        for _, req := range response.ToolRequests() {
            tool := genkit.LookupTool(g, req.Name)
            if tool == nil {
                log.Fatalf("tool %q not found", req.Name)
            }

            output, err := tool.RunRaw(ctx, req.Input)
            if err != nil {
                log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
            }

            parts = append(parts,
                ai.NewToolResponsePart(&ai.ToolResponse{
                    Name:   req.Name,
                    Ref:    req.Ref,
                    Output: output,
                }))

        }

        if len(parts) > 0 {
            resp, err := genkit.Generate(ctx, g,
                ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
                ai.WithTools(toolRefs...),
            )
            if err != nil {
                log.Fatal(err)
            }
            fmt.Println("\n", resp.Text())
            conversationHistory = append(conversationHistory, resp.Message)
        } else {
            fmt.Println("\n", response.Text())
        }

    }

}
@@ -1,49 +0,0 @@
module langchan-quickstart

go 1.24.6

require (
    github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
    github.com/tmc/langchaingo v0.1.13
)

require (
    cloud.google.com/go v0.121.1 // indirect
    cloud.google.com/go/ai v0.7.0 // indirect
    cloud.google.com/go/aiplatform v1.85.0 // indirect
    cloud.google.com/go/auth v0.16.2 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
    cloud.google.com/go/compute/metadata v0.7.0 // indirect
    cloud.google.com/go/iam v1.5.2 // indirect
    cloud.google.com/go/longrunning v0.6.7 // indirect
    cloud.google.com/go/vertexai v0.12.0 // indirect
    github.com/dlclark/regexp2 v1.10.0 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/google/generative-ai-go v0.15.1 // indirect
    github.com/google/s2a-go v0.1.9 // indirect
    github.com/google/uuid v1.6.0 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
    github.com/googleapis/gax-go/v2 v2.14.2 // indirect
    github.com/pkoukk/tiktoken-go v0.1.6 // indirect
    go.opentelemetry.io/auto/sdk v1.1.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
    go.opentelemetry.io/otel v1.36.0 // indirect
    go.opentelemetry.io/otel/metric v1.36.0 // indirect
    go.opentelemetry.io/otel/trace v1.36.0 // indirect
    golang.org/x/crypto v0.39.0 // indirect
    golang.org/x/net v0.41.0 // indirect
    golang.org/x/oauth2 v0.30.0 // indirect
    golang.org/x/sync v0.15.0 // indirect
    golang.org/x/sys v0.33.0 // indirect
    golang.org/x/text v0.26.0 // indirect
    golang.org/x/time v0.12.0 // indirect
    google.golang.org/api v0.242.0 // indirect
    google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
    google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
    google.golang.org/grpc v1.73.0 // indirect
    google.golang.org/protobuf v1.36.6 // indirect
)
@@ -1,149 +0,0 @@
package main

import (
    "context"
    "encoding/json"
    "fmt"
    "log"
    "os"

    "github.com/googleapis/mcp-toolbox-sdk-go/core"
    "github.com/tmc/langchaingo/llms"
    "github.com/tmc/langchaingo/llms/googleai"
)

// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {

    // Fetch the tool's input schema
    inputschema, err := toolboxTool.InputSchema()
    if err != nil {
        return llms.Tool{}
    }

    var paramsSchema map[string]any
    _ = json.Unmarshal(inputschema, &paramsSchema)

    // Convert into LangChain's llms.Tool
    return llms.Tool{
        Type: "function",
        Function: &llms.FunctionDefinition{
            Name:        toolboxTool.Name(),
            Description: toolboxTool.Description(),
            Parameters:  paramsSchema,
        },
    }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
    "Find hotels in Basel with Basel in its name.",
    "Can you book the hotel Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it.",
    "Please book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
    genaiKey := os.Getenv("GOOGLE_API_KEY")
    toolboxURL := "http://localhost:5000"
    ctx := context.Background()

    // Initialize the Google AI client (LLM).
    llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
    if err != nil {
        log.Fatalf("Failed to create Google AI client: %v", err)
    }

    // Initialize the MCP Toolbox client.
    toolboxClient, err := core.NewToolboxClient(toolboxURL)
    if err != nil {
        log.Fatalf("Failed to create Toolbox client: %v", err)
    }

    // Load the tool using the MCP Toolbox SDK.
    tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
    if err != nil {
        log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
    }

    toolsMap := make(map[string]*core.ToolboxTool, len(tools))

    langchainTools := make([]llms.Tool, len(tools))
    // Convert the loaded ToolboxTools into the format LangChainGo requires.
    for i, tool := range tools {
        langchainTools[i] = ConvertToLangchainTool(tool)
        toolsMap[tool.Name()] = tool
    }

    // Start the conversation history.
    messageHistory := []llms.MessageContent{
        llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
    }

    for _, query := range queries {
        messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))

        // Make the first call to the LLM, making it aware of the tool.
        resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
        if err != nil {
            log.Fatalf("LLM call failed: %v", err)
        }
        respChoice := resp.Choices[0]

        assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
        for _, tc := range respChoice.ToolCalls {
            assistantResponse.Parts = append(assistantResponse.Parts, tc)
        }
        messageHistory = append(messageHistory, assistantResponse)

        // Process each tool call requested by the model.
        for _, tc := range respChoice.ToolCalls {
            toolName := tc.FunctionCall.Name
            tool := toolsMap[toolName]
            var args map[string]any
            if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
                log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
            }
            toolResult, err := tool.Invoke(ctx, args)
            if err != nil {
                log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
            }
            if toolResult == "" || toolResult == nil {
                toolResult = "Operation completed successfully with no specific return value."
            }

            // Create the tool call response message and add it to the history.
            toolResponse := llms.MessageContent{
                Role: llms.ChatMessageTypeTool,
                Parts: []llms.ContentPart{
                    llms.ToolCallResponse{
                        Name:    toolName,
                        Content: fmt.Sprintf("%v", toolResult),
                    },
                },
            }
            messageHistory = append(messageHistory, toolResponse)
        }
        finalResp, err := llm.GenerateContent(ctx, messageHistory)
        if err != nil {
            log.Fatalf("Final LLM call failed after tool execution: %v", err)
        }

        // Add the final textual response from the LLM to the history
        messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))

        fmt.Println(finalResp.Choices[0].Content)

    }

}
@@ -1,38 +0,0 @@
module openai-quickstart

go 1.24.6

require (
    github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
    github.com/openai/openai-go v1.12.0
)

require (
    cloud.google.com/go/auth v0.16.2 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
    cloud.google.com/go/compute/metadata v0.7.0 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/google/s2a-go v0.1.9 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
    github.com/googleapis/gax-go/v2 v2.14.2 // indirect
    github.com/tidwall/gjson v1.14.4 // indirect
    github.com/tidwall/match v1.1.1 // indirect
    github.com/tidwall/pretty v1.2.1 // indirect
    github.com/tidwall/sjson v1.2.5 // indirect
    go.opentelemetry.io/auto/sdk v1.1.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
    go.opentelemetry.io/otel v1.36.0 // indirect
    go.opentelemetry.io/otel/metric v1.36.0 // indirect
    go.opentelemetry.io/otel/trace v1.36.0 // indirect
    golang.org/x/crypto v0.39.0 // indirect
    golang.org/x/net v0.41.0 // indirect
    golang.org/x/oauth2 v0.30.0 // indirect
    golang.org/x/sys v0.33.0 // indirect
    golang.org/x/text v0.26.0 // indirect
    google.golang.org/api v0.242.0 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
    google.golang.org/grpc v1.73.0 // indirect
    google.golang.org/protobuf v1.36.6 // indirect
)
@@ -1,141 +0,0 @@
package main

import (
    "context"
    "encoding/json"
    "fmt"
    "log"

    "github.com/googleapis/mcp-toolbox-sdk-go/core"
    openai "github.com/openai/openai-go"
)

// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
    // Get the input schema
    jsonSchemaBytes, err := toolboxTool.InputSchema()
    if err != nil {
        return openai.ChatCompletionToolParam{}
    }

    // Unmarshal the JSON bytes into FunctionParameters
    var paramsSchema openai.FunctionParameters
    if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
        return openai.ChatCompletionToolParam{}
    }

    // Create and return the final tool parameter struct.
    return openai.ChatCompletionToolParam{
        Function: openai.FunctionDefinitionParam{
            Name:        toolboxTool.Name(),
            Description: openai.String(toolboxTool.Description()),
            Parameters:  paramsSchema,
        },
    }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
    "Find hotels in Basel with Basel in its name.",
    "Can you book the hotel Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
    // Setup
    ctx := context.Background()
    toolboxURL := "http://localhost:5000"
    openAIClient := openai.NewClient()

    // Initialize the MCP Toolbox client.
    toolboxClient, err := core.NewToolboxClient(toolboxURL)
    if err != nil {
        log.Fatalf("Failed to create Toolbox client: %v", err)
    }

    // Load the tools using the MCP Toolbox SDK.
    tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
    if err != nil {
        log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
    }

    openAITools := make([]openai.ChatCompletionToolParam, len(tools))
    toolsMap := make(map[string]*core.ToolboxTool, len(tools))

    for i, tool := range tools {
        // Convert the Toolbox tool into the openAI FunctionDeclaration format.
        openAITools[i] = ConvertToOpenAITool(tool)
        // Add tool to a map for lookup later
        toolsMap[tool.Name()] = tool

    }

    params := openai.ChatCompletionNewParams{
        Messages: []openai.ChatCompletionMessageParamUnion{
            openai.SystemMessage(systemPrompt),
        },
        Tools: openAITools,
        Seed:  openai.Int(0),
        Model: openai.ChatModelGPT4o,
    }

    for _, query := range queries {

        params.Messages = append(params.Messages, openai.UserMessage(query))

        // Make initial chat completion request
        completion, err := openAIClient.Chat.Completions.New(ctx, params)
        if err != nil {
            panic(err)
        }

        toolCalls := completion.Choices[0].Message.ToolCalls

        // Return early if there are no tool calls
        if len(toolCalls) == 0 {
            log.Println("No function call")
        }

        // If there was a function call, continue the conversation
        params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
        for _, toolCall := range toolCalls {

            toolName := toolCall.Function.Name
            toolToInvoke := toolsMap[toolName]

            var args map[string]any
            err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
            if err != nil {
                panic(err)
            }

            result, err := toolToInvoke.Invoke(ctx, args)
            if err != nil {
                log.Fatal("Could not invoke tool", err)
            }

            params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
        }

        completion, err = openAIClient.Chat.Completions.New(ctx, params)
        if err != nil {
            panic(err)
        }

        params.Messages = append(params.Messages, openai.AssistantMessage(query))

        fmt.Println("\n", completion.Choices[0].Message.Content)

    }

}
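A note on the OpenAI Go sample above: the type assertion `result.(string)` panics if a tool ever returns a non-string value. A small helper along the lines of the sketch below could coerce the result first. This is illustrative only and not part of the compared commits; the name `toolResultToString` is hypothetical, and the sketch uses only the `encoding/json` and `fmt` packages the sample already imports.

```go
// Sketch only: coerce an arbitrary tool result into the string form that
// openai.ToolMessage expects, instead of asserting result.(string).
func toolResultToString(result any) string {
    if s, ok := result.(string); ok {
        return s
    }
    if b, err := json.Marshal(result); err == nil {
        return string(b)
    }
    return fmt.Sprintf("%v", result)
}
```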
@@ -1,119 +0,0 @@
import { GoogleGenAI } from "@google/genai";
import { ToolboxClient } from "@toolbox-sdk/core";


const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key

const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
location and price tier. Always mention hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;

const queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

function mapZodTypeToOpenAPIType(zodTypeName) {

    console.log(zodTypeName)
    const typeMap = {
        'ZodString': 'string',
        'ZodNumber': 'number',
        'ZodBoolean': 'boolean',
        'ZodArray': 'array',
        'ZodObject': 'object',
    };
    return typeMap[zodTypeName] || 'string';
}

async function main() {

    const toolboxClient = new ToolboxClient(TOOLBOX_URL);
    const toolboxTools = await toolboxClient.loadToolset("my-toolset");

    const geminiTools = [{
        functionDeclarations: toolboxTools.map(tool => {

            const schema = tool.getParamSchema();
            const properties = {};
            const required = [];


            for (const [key, param] of Object.entries(schema.shape)) {
                properties[key] = {
                    type: mapZodTypeToOpenAPIType(param.constructor.name),
                    description: param.description || '',
                };
                required.push(key)
            }

            return {
                name: tool.getName(),
                description: tool.getDescription(),
                parameters: { type: 'object', properties, required },
            };
        })
    }];


    const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });

    const chat = genAI.chats.create({
        model: "gemini-2.5-flash",
        config: {
            systemInstruction: prompt,
            tools: geminiTools,
        }
    });

    for (const query of queries) {

        let currentResult = await chat.sendMessage({ message: query });

        let finalResponseGiven = false
        while (!finalResponseGiven) {

            const response = currentResult;
            const functionCalls = response.functionCalls || [];

            if (functionCalls.length === 0) {
                console.log(response.text)
                finalResponseGiven = true;
            } else {
                const toolResponses = [];
                for (const call of functionCalls) {
                    const toolName = call.name
                    const toolToExecute = toolboxTools.find(t => t.getName() === toolName);

                    if (toolToExecute) {
                        try {
                            const functionResult = await toolToExecute(call.args);
                            toolResponses.push({
                                functionResponse: { name: call.name, response: { result: functionResult } }
                            });
                        } catch (e) {
                            console.error(`Error executing tool '${toolName}':`, e);
                            toolResponses.push({
                                functionResponse: { name: call.name, response: { error: e.message } }
                            });
                        }
                    }
                }

                currentResult = await chat.sendMessage({ message: toolResponses });
            }
        }

    }
}

main();
@@ -1,88 +0,0 @@
import { ToolboxClient } from "@toolbox-sdk/core";
import { genkit } from "genkit";
import { googleAI } from '@genkit-ai/googleai';

const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;

const queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

async function main() {
    const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");

    const ai = genkit({
        plugins: [
            googleAI({
                apiKey: process.env.GEMINI_API_KEY || GOOGLE_API_KEY
            })
        ],
        model: googleAI.model('gemini-2.0-flash'),
    });

    const toolboxTools = await toolboxClient.loadToolset("my-toolset");
    const toolMap = Object.fromEntries(
        toolboxTools.map((tool) => {
            const definedTool = ai.defineTool(
                {
                    name: tool.getName(),
                    description: tool.getDescription(),
                    inputSchema: tool.getParamSchema(),
                },
                tool
            );
            return [tool.getName(), definedTool];
        })
    );
    const tools = Object.values(toolMap);

    let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];

    for (const query of queries) {
        conversationHistory.push({ role: "user", content: [{ text: query }] });
        const response = await ai.generate({
            messages: conversationHistory,
            tools: tools,
        });
        conversationHistory.push(response.message);

        const toolRequests = response.toolRequests;
        if (toolRequests?.length > 0) {
            // Execute tools concurrently and collect their responses.
            const toolResponses = await Promise.all(
                toolRequests.map(async (call) => {
                    try {
                        const toolOutput = await toolMap[call.name].invoke(call.input);
                        return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
                    } catch (e) {
                        console.error(`Error executing tool ${call.name}:`, e);
                        return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
                    }
                })
            );

            conversationHistory.push(...toolResponses);

            // Call the AI again with the tool results.
            response = await ai.generate({ messages: conversationHistory, tools });
            conversationHistory.push(response.message);
        }

        console.log(response.text);
    }
}

main();
@@ -1,73 +0,0 @@
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { ToolboxClient } from "@toolbox-sdk/core";
import { tool } from "@langchain/core/tools";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { MemorySaver } from "@langchain/langgraph";

const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key

const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;

const queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

async function main() {
    const model = new ChatGoogleGenerativeAI({
        model: "gemini-2.0-flash",
    });

    const client = new ToolboxClient("http://127.0.0.1:5000");
    const toolboxTools = await client.loadToolset("my-toolset");

    // Define the basics of the tool: name, description, schema and core logic
    const getTool = (toolboxTool) => tool(toolboxTool, {
        name: toolboxTool.getName(),
        description: toolboxTool.getDescription(),
        schema: toolboxTool.getParamSchema()
    });
    const tools = toolboxTools.map(getTool);

    const agent = createReactAgent({
        llm: model,
        tools: tools,
        checkpointer: new MemorySaver(),
        systemPrompt: prompt,
    });

    const langGraphConfig = {
        configurable: {
            thread_id: "test-thread",
        },
    };

    for (const query of queries) {
        const agentOutput = await agent.invoke(
            {
                messages: [
                    {
                        role: "user",
                        content: query,
                    },
                ],
                verbose: true,
            },
            langGraphConfig
        );
        const response = agentOutput.messages[agentOutput.messages.length - 1].content;
        console.log(response);
    }
}

main();
@@ -1,79 +0,0 @@
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
import { agent } from "@llamaindex/workflow";
import { createMemory, staticBlock, tool } from "llamaindex";
import { ToolboxClient } from "@toolbox-sdk/core";

const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key

const prompt = `

You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
When the user searches for a hotel, mention its name, id, location and price tier.
Always mention hotel ids while performing any searches — this is very important for operations.
For any bookings or cancellations, please provide the appropriate confirmation.
Update check-in or check-out dates if mentioned by the user.
Don't ask for confirmations from the user.

`;

const queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

async function main() {
    // Connect to MCP Toolbox
    const client = new ToolboxClient(TOOLBOX_URL);
    const toolboxTools = await client.loadToolset("my-toolset");
    const tools = toolboxTools.map((toolboxTool) => {
        return tool({
            name: toolboxTool.getName(),
            description: toolboxTool.getDescription(),
            parameters: toolboxTool.getParamSchema(),
            execute: toolboxTool,
        });
    });

    // Initialize LLM
    const llm = gemini({
        model: GEMINI_MODEL.GEMINI_2_0_FLASH,
        apiKey: GOOGLE_API_KEY,
    });

    const memory = createMemory({
        memoryBlocks: [
            staticBlock({
                content: prompt,
            }),
        ],
    });

    // Create the Agent
    const myAgent = agent({
        tools: tools,
        llm,
        memory,
        systemPrompt: prompt,
    });

    for (const query of queries) {
        const result = await myAgent.run(query);
        const output = result.data.result;

        console.log(`\nUser: ${query}`);
        if (typeof output === "string") {
            console.log(output.trim());
        } else if (typeof output === "object" && "text" in output) {
            console.log(output.text.trim());
        } else {
            console.log(JSON.stringify(output));
        }
    }
    //You may observe some extra logs during execution due to the run method provided by Llama.
    console.log("Agent run finished.");
}

main();
@@ -1,70 +0,0 @@
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.genai import types
from toolbox_core import ToolboxSyncClient

import asyncio
import os

# TODO(developer): replace this with your Google API key

os.environ['GOOGLE_API_KEY'] = 'your-api-key'

async def main():
    with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:

        prompt = """
          You're a helpful hotel assistant. You handle hotel searching, booking and
          cancellations. When the user searches for a hotel, mention it's name, id,
          location and price tier. Always mention hotel ids while performing any
          searches. This is very important for any operations. For any bookings or
          cancellations, please provide the appropriate confirmation. Be sure to
          update checkin or checkout dates if mentioned by the user.
          Don't ask for confirmations from the user.
        """

        root_agent = Agent(
            model='gemini-2.0-flash-001',
            name='hotel_agent',
            description='A helpful AI assistant.',
            instruction=prompt,
            tools=toolbox_client.load_toolset("my-toolset"),
        )

        session_service = InMemorySessionService()
        artifacts_service = InMemoryArtifactService()
        session = await session_service.create_session(
            state={}, app_name='hotel_agent', user_id='123'
        )
        runner = Runner(
            app_name='hotel_agent',
            agent=root_agent,
            artifact_service=artifacts_service,
            session_service=session_service,
        )

        queries = [
            "Find hotels in Basel with Basel in its name.",
            "Can you book the Hilton Basel for me?",
            "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
            "My check in dates would be from April 10, 2024 to April 19, 2024.",
        ]

        for query in queries:
            content = types.Content(role='user', parts=[types.Part(text=query)])
            events = runner.run(session_id=session.id,
                                user_id='123', new_message=content)

            responses = (
                part.text
                for event in events
                for part in event.content.parts
                if part.text is not None
            )

            for text in responses:
                print(text)

asyncio.run(main())
@@ -1,108 +0,0 @@
import asyncio

from google import genai
from google.genai.types import (
    Content,
    FunctionDeclaration,
    GenerateContentConfig,
    Part,
    Tool,
)

from toolbox_core import ToolboxClient

prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention it's name, id,
location and price tier. Always mention hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""

queries = [
    "Find hotels in Basel with Basel in its name.",
    "Please book the hotel Hilton Basel for me.",
    "This is too expensive. Please cancel it.",
    "Please book Hyatt Regency for me",
    "My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
]

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:

        # The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
        # integration. While this example uses Google's genai client, these callables can be adapted for
        # various function-calling or agent frameworks. For easier integration with supported frameworks
        # (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
        # provided wrapper packages, which handle framework-specific boilerplate.
        toolbox_tools = await toolbox_client.load_toolset("my-toolset")
        genai_client = genai.Client(
            vertexai=True, project="project-id", location="us-central1"
        )

        genai_tools = [
            Tool(
                function_declarations=[
                    FunctionDeclaration.from_callable_with_api_option(callable=tool)
                ]
            )
            for tool in toolbox_tools
        ]
        history = []
        for query in queries:
            user_prompt_content = Content(
                role="user",
                parts=[Part.from_text(text=query)],
            )
            history.append(user_prompt_content)

            response = genai_client.models.generate_content(
                model="gemini-2.0-flash-001",
                contents=history,
                config=GenerateContentConfig(
                    system_instruction=prompt,
                    tools=genai_tools,
                ),
            )
            history.append(response.candidates[0].content)
            function_response_parts = []
            for function_call in response.function_calls:
                fn_name = function_call.name
                # The tools are sorted alphabetically
                if fn_name == "search-hotels-by-name":
                    function_result = await toolbox_tools[3](**function_call.args)
                elif fn_name == "search-hotels-by-location":
                    function_result = await toolbox_tools[2](**function_call.args)
                elif fn_name == "book-hotel":
                    function_result = await toolbox_tools[0](**function_call.args)
                elif fn_name == "update-hotel":
                    function_result = await toolbox_tools[4](**function_call.args)
                elif fn_name == "cancel-hotel":
                    function_result = await toolbox_tools[1](**function_call.args)
                else:
                    raise ValueError("Function name not present.")
                function_response = {"result": function_result}
                function_response_part = Part.from_function_response(
                    name=function_call.name,
                    response=function_response,
                )
                function_response_parts.append(function_response_part)

            if function_response_parts:
                tool_response_content = Content(role="tool", parts=function_response_parts)
                history.append(tool_response_content)

            response2 = genai_client.models.generate_content(
                model="gemini-2.0-flash-001",
                contents=history,
                config=GenerateContentConfig(
                    tools=genai_tools,
                ),
            )
            final_model_response_content = response2.candidates[0].content
            history.append(final_model_response_content)
            print(response2.text)

asyncio.run(main())
@@ -1,52 +0,0 @@
import asyncio

from langgraph.prebuilt import create_react_agent

# TODO(developer): replace this with another import if needed

from langchain_google_vertexai import ChatVertexAI

# from langchain_google_genai import ChatGoogleGenerativeAI

# from langchain_anthropic import ChatAnthropic

from langgraph.checkpoint.memory import MemorySaver

from toolbox_langchain import ToolboxClient

prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention it's name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""

queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
]

async def main():
    # TODO(developer): replace this with another model if needed
    model = ChatVertexAI(model_name="gemini-2.0-flash-001")
    # model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
    # model = ChatAnthropic(model="claude-3-5-sonnet-20240620")

    # Load the tools from the Toolbox server
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        tools = await client.aload_toolset()

        agent = create_react_agent(model, tools, checkpointer=MemorySaver())

        config = {"configurable": {"thread_id": "thread-1"}}
        for query in queries:
            inputs = {"messages": [("user", prompt + query)]}
            response = agent.invoke(inputs, stream_mode="values", config=config)
            print(response["messages"][-1].content)

asyncio.run(main())
@@ -1,63 +0,0 @@
import asyncio
import os

from llama_index.core.agent.workflow import AgentWorkflow

from llama_index.core.workflow import Context

# TODO(developer): replace this with another import if needed

from llama_index.llms.google_genai import GoogleGenAI

# from llama_index.llms.anthropic import Anthropic

from toolbox_llamaindex import ToolboxClient

prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention it's name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""

queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
]

async def main():
    # TODO(developer): replace this with another model if needed
    llm = GoogleGenAI(
        model="gemini-2.0-flash-001",
        vertexai_config={"project": "project-id", "location": "us-central1"},
    )
    # llm = GoogleGenAI(
    #     api_key=os.getenv("GOOGLE_API_KEY"),
    #     model="gemini-2.0-flash-001",
    # )
    # llm = Anthropic(
    #     model="claude-3-7-sonnet-latest",
    #     api_key=os.getenv("ANTHROPIC_API_KEY")
    # )

    # Load the tools from the Toolbox server
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        tools = await client.aload_toolset()

        agent = AgentWorkflow.from_tools_or_functions(
            tools,
            llm=llm,
            system_prompt=prompt,
        )
        ctx = Context(agent)
        for query in queries:
            response = await agent.run(user_msg=query, ctx=ctx)
            print(f"---- {query} ----")
            print(str(response))

asyncio.run(main())
@@ -1,20 +0,0 @@
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
<!-- [START cloud_setup] -->
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using
`vertexai=True` or a Google GenAI model), follow these one-time setup steps for
local development:

1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
1. Set your project and enable Vertex AI

    ```bash
    gcloud config set project YOUR_PROJECT_ID
    gcloud services enable aiplatform.googleapis.com
    ```

[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
<!-- [END cloud_setup] -->
@@ -1,122 +0,0 @@
|
||||
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
|
||||
<!-- [START configure_toolbox] -->
|
||||
In this section, we will download Toolbox, configure our tools in a
|
||||
`tools.yaml`, and then run the Toolbox server.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Write the following into a `tools.yaml` file. Be sure to update any fields
|
||||
such as `user`, `password`, or `database` that you may have customized in the
|
||||
previous step.
|
||||
|
||||
{{< notice tip >}}
|
||||
In practice, use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-pg-source:
|
||||
kind: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
tools:
|
||||
search-hotels-by-name:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
search-hotels-by-location:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
book-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
update-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
cancel-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
toolsets:
|
||||
my-toolset:
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
    For more info on tools, check out the `Resources` section of the docs.

1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:

    ```bash
    ./toolbox --tools-file "tools.yaml"
    ```

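    Once the server reports that it is ready, you can smoke-test it from a
    second terminal. This sketch assumes the default listen address of
    `127.0.0.1:5000`; adjust the URL if you started the server with a different
    address or port:

    ```bash
    # Any HTTP response at all confirms the process is up and serving.
    curl http://127.0.0.1:5000
    ```
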
    {{< notice note >}}
    Toolbox enables dynamic reloading by default. To disable it, use the
    `--disable-reload` flag.
    {{< /notice >}}
<!-- [END configure_toolbox] -->
@@ -1,119 +0,0 @@
<!-- This file has been used in local_quickstart.md, local_quickstart_go.md & local_quickstart_js.md -->
<!-- [START database_setup] -->
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.

1. Connect to postgres using the `psql` command:

    ```bash
    psql -h 127.0.0.1 -U postgres
    ```

    Here, `postgres` denotes the default postgres superuser.

    {{< notice info >}}

    #### **Having trouble connecting?**

    * **Password Prompt:** If you are prompted for a password for the `postgres`
      user and do not know it (or a blank password doesn't work), your PostgreSQL
      installation might require a password or a different authentication method.
    * **`FATAL: role "postgres" does not exist`:** This error means the default
      `postgres` superuser role isn't available under that name on your system.
    * **`Connection refused`:** Ensure your PostgreSQL server is actually running.
      You can typically check with `sudo systemctl status postgresql` and start it
      with `sudo systemctl start postgresql` on Linux systems.

    <br/>

    #### **Common Solution**

    For password issues or if the `postgres` role seems inaccessible directly, try
    switching to the `postgres` operating system user first. This user often has
    permission to connect without a password for local connections (this is called
    peer authentication). Peer authentication only applies to local socket
    connections, so omit the `-h` flag here:

    ```bash
    sudo -i -u postgres
    psql
    ```

    Once you are in the `psql` shell using this method, you can proceed with the
    database creation steps below. Afterwards, type `\q` to exit `psql`, and then
    `exit` to return to your normal user shell.

    If desired, once connected to `psql` as the `postgres` OS user, you can set a
    password for the `postgres` *database* user using: `ALTER USER postgres WITH
    PASSWORD 'your_chosen_password';`. This would allow a direct connection with
    `-U postgres` and a password next time.
    {{< /notice >}}

1. Create a new database and a new user:

    {{< notice tip >}}
    For a real application, it's best to follow the principle of least privilege
    and only grant the privileges your application needs.
    {{< /notice >}}

    ```sql
    CREATE USER toolbox_user WITH PASSWORD 'my-password';

    CREATE DATABASE toolbox_db;
    GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;

    ALTER DATABASE toolbox_db OWNER TO toolbox_user;
    ```

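    To double-check that the role and database were created, you can run the
    `psql` meta-commands below from a regular shell (shown as one-liners; they
    assume the same superuser connection used above):

    ```bash
    # List the new role and the new database; both should appear in the output.
    psql -h 127.0.0.1 -U postgres -c '\du toolbox_user'
    psql -h 127.0.0.1 -U postgres -c '\l toolbox_db'
    ```
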
1. End the database session:

    ```bash
    \q
    ```

    (If you used `sudo -i -u postgres` and then `psql`, remember you might also
    need to type `exit` after `\q` to leave the `postgres` user's shell
    session.)

1. Connect to your database with your new user:

    ```bash
    psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
    ```

1. Create a table using the following command:

    ```sql
    CREATE TABLE hotels(
      id INTEGER NOT NULL PRIMARY KEY,
      name VARCHAR NOT NULL,
      location VARCHAR NOT NULL,
      price_tier VARCHAR NOT NULL,
      checkin_date DATE NOT NULL,
      checkout_date DATE NOT NULL,
      booked BIT NOT NULL
    );
    ```

1. Insert data into the table.

    ```sql
    INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
    VALUES
      (1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
      (2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
      (3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
      (4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
      (5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
      (6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
      (7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
      (8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
      (9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
      (10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
    ```

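    To confirm the rows loaded, you can run a quick count from a separate shell
    (a simple sanity check; it should report 10 rows):

    ```bash
    # Counts the rows inserted above using the new application user.
    psql -h 127.0.0.1 -U toolbox_user -d toolbox_db -c 'SELECT count(*) FROM hotels;'
    ```
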
1. End the database session:

    ```bash
    \q
    ```
<!-- [END database_setup] -->
@@ -1,14 +0,0 @@
|
||||
---
|
||||
title: "AlloyDB Admin API using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Create your AlloyDB database with MCP Toolbox.
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -7,7 +7,7 @@ description: >
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
---
|
||||
title: "Firestore using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Firestore using Toolbox.
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -1,311 +0,0 @@
|
||||
---
|
||||
title: "Looker using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Looker using Toolbox.
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
|
||||
an open protocol for connecting Large Language Models (LLMs) to data sources
|
||||
like Postgres. This guide covers how to use [MCP Toolbox for Databases][toolbox]
|
||||
to expose your developer assistant tools to a Looker instance:
|
||||
|
||||
* [Gemini-CLI][gemini-cli]
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[gemini-cli]: #configure-your-mcp-client
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
|
||||
## Set up Looker
|
||||
|
||||
1. Get a Looker Client ID and Client Secret. Follow the directions
|
||||
[here](https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk).
|
||||
|
||||
1. Have the base URL of your Looker instance available. It is likely
|
||||
something like `https://looker.example.com`. In some cases the API is
|
||||
listening at a different port, and you will need to use
|
||||
`https://looker.example.com:19999` instead.
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Gemini-CLI" lang="en" %}}
|
||||
|
||||
1. Install [Gemini-CLI](https://github.com/google-gemini/gemini-cli#install-globally-with-npm).
|
||||
1. Create a directory `.gemini` in your home directory if it doesn't exist.
|
||||
1. Create the file `.gemini/settings.json` if it doesn't exist.
|
||||
1. Add the following configuration, or add the mcpServers stanza if you already
|
||||
have a `settings.json` with content. Replace the path to the toolbox
|
||||
executable and the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Start Gemini-CLI with the `gemini` command and use the command `/mcp` to see
|
||||
the configured MCP tools.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude Code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
"LOOKER_CLIENT_SECRET": "",
|
||||
"LOOKER_VERIFY_SSL": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Looker using MCP. Try asking your AI
|
||||
assistant to list models, explores, dimensions, and measures. Run a
|
||||
query, retrieve the SQL for a query, and run a saved Look.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **get_models**: list the LookML models in Looker
|
||||
1. **get_explores**: list the explores in a given model
|
||||
1. **get_dimensions**: list the dimensions in a given explore
|
||||
1. **get_measures**: list the measures in a given explore
|
||||
1. **get_filters**: list the filters in a given explore
|
||||
1. **get_parameters**: list the parameters in a given explore
|
||||
1. **query**: Run a query and return the data
|
||||
1. **query_sql**: Return the SQL generated by Looker for a query
|
||||
1. **query_url**: Return a link to the query in Looker for further exploration
|
||||
1. **get_looks**: Return the saved Looks that match a title or description
|
||||
1. **run_look**: Run a saved Look and return the data
|
||||
1. **make_look**: Create a saved Look in Looker and return the URL
|
||||
1. **get_dashboards**: Return the saved dashboards that match a title or description
|
||||
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
|
||||
1. **add_dashboard_element**: Add a tile to a dashboard
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -1,287 +0,0 @@
|
||||
---
|
||||
title: SQL Server using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to SQL Server using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQL Server. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQL Server instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a SQL Server instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mssql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to SQL Server using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -1,287 +0,0 @@
|
||||
---
|
||||
title: MySQL using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to MySQL using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a MySQL instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a MySQL instance.](https://dev.mysql.com/downloads/installer/)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -1,281 +0,0 @@
|
||||
---
|
||||
title: Neo4j using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to Neo4j using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Neo4j instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a Neo4j instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.15.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant to get the graph schema or execute Cypher statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **get_schema**: extracts the complete database schema, including details about node labels, relationships, properties, constraints, and indexes.
|
||||
1. **execute_cypher**: executes any arbitrary Cypher statement.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -17,8 +17,6 @@ to expose your developer assistant tools to a Postgres instance:
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
@@ -27,8 +25,6 @@ to expose your developer assistant tools to a Postgres instance:
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
{{< notice tip >}}
|
||||
This guide can be used with [AlloyDB
|
||||
@@ -56,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/linux/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/arm64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/windows/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -217,7 +213,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
@@ -263,57 +259,6 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
"env": {
|
||||
"POSTGRES_HOST": "",
|
||||
"POSTGRES_PORT": "",
|
||||
"POSTGRES_DATABASE": "",
|
||||
"POSTGRES_USER": "",
|
||||
"POSTGRES_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
@@ -1,255 +0,0 @@
|
||||
---
|
||||
title: SQLite using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to SQLite using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQLite. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQLite instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a SQLite database file.](https://www.sqlite.org/download.html)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlite": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "sqlite", "--stdio"],
|
||||
"env": {
|
||||
"SQLITE_DATABASE": "./sample.db"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}

{{% tab header="Cursor" lang="en" %}}

1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt", "sqlite", "--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}

{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}

1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:

    ```json
    {
      "servers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

{{% /tab %}}

{{% tab header="Windsurf" lang="en" %}}

1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

{{% /tab %}}

{{% tab header="Gemini CLI" lang="en" %}}

1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

{{% /tab %}}

{{% tab header="Gemini Code Assist" lang="en" %}}

1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

{{% /tab %}}
{{< /tabpane >}}

## Use Tools

Your AI tool is now connected to SQLite using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.

The following tools are available to the LLM:

1. **list_tables**: lists tables and descriptions
1. **execute_sql**: execute any SQL statement

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
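
If you want to exercise the same tools outside of your AI assistant, one option is to run the identical prebuilt configuration as a local HTTP server and use the built-in UI. This is only a quick sketch; it assumes the `sample.db` file from the configuration above and relies on the `--ui` flag described in the CLI reference.

```bash
# Run the same prebuilt SQLite config as an HTTP server (no --stdio)
# and open the interactive UI at http://localhost:5000/ui to try
# list_tables and execute_sql by hand.
export SQLITE_DATABASE=./sample.db
./toolbox --prebuilt sqlite --ui
```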

@@ -20,17 +20,18 @@ The native SDKs can be combined with MCP clients in many cases.

Toolbox currently supports the following versions of MCP specification:

* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)

### Features Not Supported by MCP

Toolbox has several features that are not yet supported in the MCP specification:

* **AuthZ/AuthN:** There is no auth implementation in the `2024-11-05`
  specification. This includes:
  * [Authenticated Parameters](../resources/tools/#authenticated-parameters)
  * [Authorized Invocations](../resources/tools/#authorized-invocations)
* **Notifications:** Currently, editing Toolbox Tools requires a server restart.
  Clients should reload tools on disconnect to get the latest version.

## Connecting to Toolbox with an MCP client

@@ -40,7 +41,7 @@ This includes:
MCP is only compatible with Toolbox version 0.3.0 and above.
{{< /notice >}}

1. [Install](../getting-started/introduction/#installing-the-server)
   Toolbox version 0.3.0+.

1. Make sure you've set up and initialized your database.
@@ -63,15 +64,14 @@ remote HTTP server. Logs will be set to the `warn` level by default. `debug` and
`info` logs are not supported with stdio.

{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}

### Connecting via HTTP

Toolbox supports the HTTP transport protocol with and without SSE.

{{< tabpane text=true >}} {{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
Add the following configuration to your MCP client configuration:

@@ -87,25 +87,11 @@ Add the following configuration to your MCP client configuration:

If you would like to connect to a specific toolset, replace `url` with
`"http://127.0.0.1:5000/mcp/{toolset_name}/sse"`.

HTTP with SSE is only supported in version `2024-11-05` and is currently
deprecated.
{{% /tab %}} {{% tab header="HTTP POST" lang="en" %}}
Connect to Toolbox HTTP POST via `http://127.0.0.1:5000/mcp`.
{{% /tab %}} {{% tab header="Streamable HTTP" lang="en" %}}
Add the following configuration to your MCP client configuration:

```bash
{
  "mcpServers": {
    "toolbox": {
      "type": "http",
      "url": "http://127.0.0.1:5000/mcp"
    }
  }
}
```

If you would like to connect to a specific toolset, replace `url` with
`"http://127.0.0.1:5000/mcp/{toolset_name}"`.
{{% /tab %}} {{< /tabpane >}}

### Using the MCP Inspector with Toolbox

@@ -132,8 +118,8 @@ testing and debugging Toolbox server.
1. Click the `Connect` button. It might take a while to spin up Toolbox. Voila!
   You should be able to inspect your toolbox tools!
{{% /tab %}}
{{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
1. [Run Toolbox](../getting-started/introduction/#running-the-server).

1. In a separate terminal, run Inspector directly through `npx`:

@@ -146,23 +132,6 @@ testing and debugging Toolbox server.
1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse` to use all tools or
   `http://127.0.0.1:5000/mcp/{toolset_name}/sse` to use a specific toolset.

1. Click the `Connect` button. Voila! You should be able to inspect your toolbox
   tools!
{{% /tab %}}
{{% tab header="Streamable HTTP" lang="en" %}}
1. [Run Toolbox](../getting-started/introduction/#running-the-server).

1. In a separate terminal, run Inspector directly through `npx`:

    ```bash
    npx @modelcontextprotocol/inspector
    ```

1. For `Transport Type` dropdown menu, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp` to use all tools or
   `http://127.0.0.1:5000/mcp/{toolset_name}` to use a specific toolset.

1. Click the `Connect` button. Voila! You should be able to inspect your toolbox
   tools!
{{% /tab %}} {{< /tabpane >}}

@@ -75,7 +75,7 @@ networks:

{{< notice tip >}}

You can use this setup to quickly set up Toolbox + Postgres to follow along in our
[Quickstart](../getting-started/local_quickstart.md).

{{< /notice >}}

@@ -60,8 +60,8 @@ description: >

    ```bash
    gcloud iam service-accounts create $SA_NAME
    ```

1. Grant any IAM roles necessary to the IAM service account. Each source has a
   list of necessary IAM permissions listed on its page. The example below is
   for the Cloud SQL for Postgres source:
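
   A minimal sketch of such a grant, assuming the `$SA_NAME` and `$PROJECT_ID` variables from the previous steps and the Cloud SQL Client role listed on that source's page:

    ```bash
    # Grant the Cloud SQL Client role to the service account created above.
    gcloud projects add-iam-policy-binding $PROJECT_ID \
        --member "serviceAccount:$SA_NAME@$PROJECT_ID.iam.gserviceaccount.com" \
        --role "roles/cloudsql.client"
    ```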
@@ -79,7 +79,7 @@ database are in the same VPC network.

Create a `tools.yaml` file that contains your configuration for Toolbox. For
details, see the
[configuration](../resources/sources/)
section.

## Deploy to Cloud Run

@@ -103,14 +103,6 @@ section.

    ```bash
    export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
    ```

    {{< notice note >}}
    **The `$PORT` Environment Variable**
    Google Cloud Run dictates the port your application must listen on by setting the
    `$PORT` environment variable inside your container. This value defaults to
    **8080**. Your application's `--port` argument **must** be set to listen on this
    port. If there is a mismatch, the container will fail to start and the
    deployment will time out.
    {{< /notice >}}

1. Deploy Toolbox to Cloud Run using the following command:

@@ -133,7 +125,7 @@ deployment will time out.
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
        --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080" \
        # TODO(dev): update the following to match your VPC if necessary
        --network default \
        --subnet default
        # --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
@@ -141,15 +133,21 @@ deployment will time out.

## Connecting with Toolbox Client SDK

You can connect to Toolbox Cloud Run instances directly through the SDK.

1. [Set up `Cloud Run Invoker` role
   access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals)
   to your Cloud Run service.

1. (Only for local runs) Set up [Application Default
   Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
   for the principal you set up the `Cloud Run Invoker` role access to.

   {{< notice tip >}}
   If you're working in an environment other than local, set up [environment
   specific Default
   Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc).
   {{< /notice >}}

1. Run the following to retrieve a non-deterministic URL for the cloud run service:
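
   A minimal sketch of that lookup, assuming the service was deployed with the name `toolbox` in `us-central1` as in the deploy command above:

    ```bash
    # Print the URL Cloud Run assigned to the service.
    gcloud run services describe toolbox \
        --region us-central1 \
        --format 'value(status.url)'
    ```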

@@ -159,68 +157,16 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.

1. Import and initialize the toolbox client with the URL retrieved above:

{{< tabpane persist=header >}}
{{< tab header="Python" lang="python" >}}
from toolbox_core import ToolboxClient, auth_methods

# Replace with the Cloud Run service URL generated in the previous step.
URL = "https://cloud-run-url.app"

auth_token_provider = auth_methods.aget_google_id_token(URL)  # can also use sync method

async with ToolboxClient(
    URL,
    client_headers={"Authorization": auth_token_provider},
) as toolbox:
{{< /tab >}}
{{< tab header="Javascript" lang="javascript" >}}
import { ToolboxClient } from '@toolbox-sdk/core';
import {getGoogleIdToken} from '@toolbox-sdk/core/auth'

// Replace with the Cloud Run service URL generated in the previous step.
const URL = 'http://127.0.0.1:5000';
const authTokenProvider = () => getGoogleIdToken(URL);

const client = new ToolboxClient(URL, null, {"Authorization": authTokenProvider});
{{< /tab >}}
{{< tab header="Go" lang="go" >}}
import "github.com/googleapis/mcp-toolbox-sdk-go/core"

func main() {
  // Replace with the Cloud Run service URL generated in the previous step.
  URL := "http://127.0.0.1:5000"
  auth_token_provider, err := core.GetGoogleIDToken(ctx, URL)
  if err != nil {
    log.Fatalf("Failed to fetch token %v", err)
  }
  toolboxClient, err := core.NewToolboxClient(
    URL,
    core.WithClientHeaderString("Authorization", auth_token_provider))
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }
}
{{< /tab >}}
{{< /tabpane >}}

Now, you can use this client to connect to the deployed Cloud Run instance!

## Troubleshooting

{{< notice note >}}
For any deployment or runtime error, the best first step is to check the logs for your service in the Google Cloud Console's Cloud Run section. They often contain the specific error message needed to diagnose the problem.
{{< /notice >}}

* **Deployment Fails with "Container failed to start":** This is almost always
  caused by a port mismatch. Ensure your container's `--port` argument is set to
  `8080` to match the `$PORT` environment variable provided by Cloud Run.

* **Client Receives Permission Denied Error (401 or 403):** If your client application (e.g., your local SDK) gets a `401 Unauthorized` or `403 Forbidden` error when trying to call your Cloud Run service, it means the client is not properly authenticated as an invoker.
  * Ensure the user or service account calling the service has the **Cloud Run Invoker** (`roles/run.invoker`) IAM role.
  * If running locally, make sure your Application Default Credentials are set up correctly by running `gcloud auth application-default login`.

* **Service Fails to Access Secrets (in logs):** If your application starts but the logs show errors like "permission denied" when trying to access Secret Manager, it means the Toolbox service account is missing permissions.
  * Ensure the `toolbox-identity` service account has the **Secret Manager Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
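
As a rough sketch, both grants can be applied with `gcloud`; this assumes the service is named `toolbox`, the secret is named `tools` as in the deploy command above, and that you substitute the member your client authenticates as:

```bash
# Let a caller invoke the Cloud Run service.
gcloud run services add-iam-policy-binding toolbox \
    --region us-central1 \
    --member "user:you@example.com" \
    --role "roles/run.invoker"

# Let the service identity read the tools.yaml secret.
gcloud secrets add-iam-policy-binding tools \
    --member "serviceAccount:toolbox-identity@$PROJECT_ID.iam.gserviceaccount.com" \
    --role "roles/secretmanager.secretAccessor"
```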
@@ -1,109 +0,0 @@

---
title: "Toolbox UI"
type: docs
weight: 1
description: >
  How to effectively use Toolbox UI.
---

Toolbox UI is a built-in web interface that allows users to visually inspect and test out configured resources such as tools and toolsets.

## Launching Toolbox UI

To launch Toolbox's interactive UI, use the `--ui` flag.

```sh
./toolbox --ui
```

Toolbox UI will be served from the same host and port as the Toolbox Server, with the `/ui` suffix. Once Toolbox
is launched, an INFO log with the Toolbox UI's URL will be shown:

```bash
INFO "Toolbox UI is up and running at: http://localhost:5000/ui"
```

## Navigating the Tools Page

The tools page shows all tools loaded from your configuration file. This corresponds to the default toolset (represented by an empty string). Each tool's name on this page will exactly match its name in the configuration file.

To view details for a specific tool, click on the tool name. The main content area will be populated
with the tool name, description, and available parameters.



### Invoking a Tool

1. Click on a tool
1. Enter appropriate parameters in each parameter field
1. Click "Run Tool"
1. Done! Your results will appear in the response field
1. (Optional) Uncheck "Prettify JSON" to format the response as plain text



### Optional Parameters

Toolbox allows users to add [optional parameters](../../resources/tools/#basic-parameters) with or without a default value.

To exclude a parameter, uncheck the box to the right of the associated parameter, and that parameter will not be
included in the request body. If the parameter is not sent, Toolbox will either treat it as a `nil` value or use the `default` value, if configured. If the parameter is required, Toolbox will throw an error.

When the box is checked, the parameter will be sent exactly as entered (e.g. an empty string).




### Editing Headers

To edit headers, press the "Edit Headers" button to display the header modal. Within this modal,
users can make direct edits by typing into the header's text area.

Toolbox UI validates that the headers are in correct JSON format. Other header-related errors (e.g.,
incorrect header names or values required by the tool) will be reported in the Response section
after running the tool.



#### Google OAuth

Currently, Toolbox supports Google OAuth 2.0 as an AuthService, which allows tools to utilize
authorized parameters. When a tool uses an authorized parameter, the parameter will be displayed
but not editable, as it will be populated from the authentication token.

To provide the token, add your Google OAuth ID Token to the request header using the "Edit Headers"
button and modal described above. The key should be the name of your AuthService as defined in
your tool configuration file, suffixed with `_token`. The value should be your ID token as a string.

1. Select a tool that requires [authenticated parameters](../../resources/tools/#authenticated-parameters)
1. The auth parameter's text field is greyed out. This is because it cannot be entered manually and will
   be parsed from the resolved auth token
1. To update request headers with the token, select "Edit Headers"
1. (Optional) If you wish to manually edit the header, check out the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving an ID token
1. To edit the header automatically, click the "Auto Setup" button that is associated with your Auth Profile
1. Enter the Client ID defined in your tools configuration file
1. Click "Continue"
1. Click "Sign in With Google" and log in with your associated Google account. This should automatically populate the header text area with your token
1. Click "Save"
1. Click "Run Tool"

```json
{
  "Content-Type": "application/json",
  "my-google-auth_token": "YOUR_ID_TOKEN_HERE"
}
```



## Navigating the Toolsets Page

Through the toolsets page, users can search for a specific toolset to retrieve tools from. Simply
enter the toolset name in the search bar, and press "Enter" to retrieve the associated tools.

If the toolset name is not defined within the tools configuration file, an error message will be
displayed.


@@ -1,8 +0,0 @@

---
title: "Reference"
type: docs
weight: 7
description: >
  This section contains reference documentation.
---

@@ -1,75 +0,0 @@

---
title: "CLI"
type: docs
weight: 1
description: >
  This page describes the `toolbox` command-line options.
---

## Reference

| Flag (Short) | Flag (Long) | Description | Default |
|---|---|---|---|
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
| `-h` | `--help` | Help for toolbox. | |
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318'). | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| `-v` | `--version` | Version for toolbox. | |

## Examples

### Transport Configuration

**Server Settings:**

- `--address`, `-a`: Server listening address (default: "127.0.0.1")
- `--port`, `-p`: Server listening port (default: 5000)

**STDIO:**

- `--stdio`: Run in MCP STDIO mode instead of HTTP server

#### Usage Examples

```bash
# Basic server with custom port configuration
./toolbox --tools-file "tools.yaml" --port 8080
```
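
The STDIO transport combines with the other flags in the same way; for example (the prebuilt value and environment variable are taken from the prebuilt tools reference):

```bash
# Serve the prebuilt SQLite tools over MCP STDIO instead of HTTP
export SQLITE_DATABASE=./sample.db
./toolbox --prebuilt sqlite --stdio
```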

### Tool Configuration Sources

The CLI supports multiple mutually exclusive ways to specify tool configurations:

**Single File:** (default)
- `--tools-file`: Path to a single YAML configuration file (default: `tools.yaml`)

**Multiple Files:**
- `--tools-files`: Comma-separated list of YAML files to merge

**Directory:**
- `--tools-folder`: Directory containing YAML files to load and merge

**Prebuilt Configurations:**
- `--prebuilt`: Use predefined configurations for specific database types (e.g., 'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values.
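
For example, each of the following starts the server from a different configuration source (file and folder names are illustrative):

```bash
# Single file (the default looks for tools.yaml in the working directory)
./toolbox --tools-file "tools.yaml"

# Several files merged together
./toolbox --tools-files "base.yaml,extra.yaml"

# Every .yaml/.yml file in a folder, merged
./toolbox --tools-folder "./config"

# A prebuilt configuration instead of a file
./toolbox --prebuilt postgres
```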

{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags, preventing simultaneous use of `--prebuilt` with file-based options, and ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is used at a time.
{{< /notice >}}

### Hot Reload

Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
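
For example, to run from a tools file with hot reload turned off:

```bash
./toolbox --tools-file "tools.yaml" --disable-reload
```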

### Toolbox UI

To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets with features such as authorized parameters. To learn more, visit [Toolbox UI](../how-to/toolbox-ui/index.md).

@@ -1,296 +0,0 @@

---
title: "Prebuilt Tools"
type: docs
weight: 1
description: >
  This page lists all the prebuilt tools available.
---

Prebuilt tools are reusable, pre-packaged toolsets that are designed to extend the capabilities of agents. These tools are built to be generic and adaptable, allowing developers to interact with and take action on databases.

See the [Connect from your IDE](../how-to/connect-ide/_index.md) guides for details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
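
Every prebuilt configuration follows the same pattern: export the environment variables listed for the source, then start Toolbox with the matching `--prebuilt` value. A generic sketch with placeholder values:

```bash
# Substitute the variables and --prebuilt value for your source.
export POSTGRES_HOST=127.0.0.1
export POSTGRES_PORT=5432
export POSTGRES_DATABASE=mydb
export POSTGRES_USER=myuser
export POSTGRES_PASSWORD=mypassword
./toolbox --prebuilt postgres
```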

## AlloyDB Postgres

* `--prebuilt` value: `alloydb-postgres`
* **Environment Variables:**
  * `ALLOYDB_POSTGRES_PROJECT`: The GCP project ID.
  * `ALLOYDB_POSTGRES_REGION`: The region of your AlloyDB instance.
  * `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
  * `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
  * `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
  * `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
  * `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
  * `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
  * **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.
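
As a concrete instance of the pattern above (IDs are placeholders), an IAM-authenticated run looks like this:

```bash
export ALLOYDB_POSTGRES_PROJECT=my-project-id
export ALLOYDB_POSTGRES_REGION=us-central1
export ALLOYDB_POSTGRES_CLUSTER=my-cluster
export ALLOYDB_POSTGRES_INSTANCE=my-instance
export ALLOYDB_POSTGRES_DATABASE=postgres
# ALLOYDB_POSTGRES_USER / _PASSWORD are omitted, so IAM authentication is used.
./toolbox --prebuilt alloydb-postgres
```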

## AlloyDB Postgres Admin

* `--prebuilt` value: `alloydb-postgres-admin`
* **Environment Variables:**
  * `API_KEY`: Your API key for the AlloyDB API.
* **Permissions:**
  * **AlloyDB Admin** (`roles/alloydb.admin`) IAM role is required on the project.
* **Tools:**
  * `alloydb-create-cluster`: Creates a new AlloyDB cluster.
  * `alloydb-operations-get`: Polls the operations API to track the status of long-running operations.
  * `alloydb-create-instance`: Creates a new AlloyDB instance within a cluster.
  * `alloydb-list-clusters`: Lists all AlloyDB clusters in a project.
  * `alloydb-list-instances`: Lists all instances within an AlloyDB cluster.
  * `alloydb-list-users`: Lists all database users within an AlloyDB cluster.
  * `alloydb-create-user`: Creates a new database user in an AlloyDB cluster.

## BigQuery

* `--prebuilt` value: `bigquery`
* **Environment Variables:**
  * `BIGQUERY_PROJECT`: The GCP project ID.
  * `BIGQUERY_LOCATION`: (Optional) The dataset location.
* **Permissions:**
  * **BigQuery User** (`roles/bigquery.user`) to execute queries and view metadata.
  * **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view all datasets.
  * **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or modify datasets and tables.
  * **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the conversational analytics API.
* **Tools:**
  * `analyze_contribution`: Use this tool to perform contribution analysis, also called key driver analysis.
  * `ask_data_insights`: Use this tool to perform data analysis, get insights, or answer complex questions about the contents of specific BigQuery tables. For more information on required roles, API setup, and IAM configuration, see the setup and authentication section of the [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
  * `execute_sql`: Executes a SQL statement.
  * `forecast`: Use this tool to forecast time series data.
  * `get_dataset_info`: Gets dataset metadata.
  * `get_table_info`: Gets table metadata.
  * `list_dataset_ids`: Lists datasets.
  * `list_table_ids`: Lists tables.

## Cloud SQL for MySQL

* `--prebuilt` value: `cloud-sql-mysql`
* **Environment Variables:**
  * `CLOUD_SQL_MYSQL_PROJECT`: The GCP project ID.
  * `CLOUD_SQL_MYSQL_REGION`: The region of your Cloud SQL instance.
  * `CLOUD_SQL_MYSQL_INSTANCE`: The ID of your Cloud SQL instance.
  * `CLOUD_SQL_MYSQL_DATABASE`: The name of the database to connect to.
  * `CLOUD_SQL_MYSQL_USER`: The database username.
  * `CLOUD_SQL_MYSQL_PASSWORD`: The password for the database user.
  * `CLOUD_SQL_MYSQL_IP_TYPE`: The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
  * **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## Cloud SQL for PostgreSQL

* `--prebuilt` value: `cloud-sql-postgres`
* **Environment Variables:**
  * `CLOUD_SQL_POSTGRES_PROJECT`: The GCP project ID.
  * `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
  * `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
  * `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
  * `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
  * `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
  * `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
  * **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## Cloud SQL for SQL Server

* `--prebuilt` value: `cloud-sql-mssql`
* **Environment Variables:**
  * `CLOUD_SQL_MSSQL_PROJECT`: The GCP project ID.
  * `CLOUD_SQL_MSSQL_REGION`: The region of your Cloud SQL instance.
  * `CLOUD_SQL_MSSQL_INSTANCE`: The ID of your Cloud SQL instance.
  * `CLOUD_SQL_MSSQL_DATABASE`: The name of the database to connect to.
  * `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
  * `CLOUD_SQL_MSSQL_USER`: The database username.
  * `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
  * `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
  * **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## Dataplex

* `--prebuilt` value: `dataplex`
* **Environment Variables:**
  * `DATAPLEX_PROJECT`: The GCP project ID.
* **Permissions:**
  * **Dataplex Reader** (`roles/dataplex.viewer`) to search and look up entries.
  * **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
* **Tools:**
  * `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
  * `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex Catalog.
  * `dataplex_search_aspect_types`: Finds aspect types relevant to the query.

## Firestore

* `--prebuilt` value: `firestore`
* **Environment Variables:**
  * `FIRESTORE_PROJECT`: The GCP project ID.
  * `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to "(default)".
* **Permissions:**
  * **Cloud Datastore User** (`roles/datastore.user`) to get documents, list collections, and query collections.
  * **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and validate Firestore rules.
* **Tools:**
  * `firestore-get-documents`: Gets multiple documents from Firestore by their paths.
  * `firestore-list-collections`: Lists Firestore collections for a given parent path.
  * `firestore-delete-documents`: Deletes multiple documents from Firestore.
  * `firestore-query-collection`: Retrieves one or more Firestore documents from a collection.
  * `firestore-get-rules`: Retrieves the active Firestore security rules.
  * `firestore-validate-rules`: Checks the provided Firestore Rules source for syntax and validation errors.

## Looker

* `--prebuilt` value: `looker`
* **Environment Variables:**
  * `LOOKER_BASE_URL`: The URL of your Looker instance.
  * `LOOKER_CLIENT_ID`: The client ID for the Looker API.
  * `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
  * `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
* **Permissions:**
  * A Looker account with permissions to access the desired models, explores, and data is required.
* **Tools:**
  * `get_models`: Retrieves the list of LookML models.
  * `get_explores`: Retrieves the list of explores in a model.
  * `get_dimensions`: Retrieves the list of dimensions in an explore.
  * `get_measures`: Retrieves the list of measures in an explore.
  * `get_filters`: Retrieves the list of filters in an explore.
  * `get_parameters`: Retrieves the list of parameters in an explore.
  * `query`: Runs a query against the LookML model.
  * `query_sql`: Generates the SQL for a query.
  * `query_url`: Generates a URL for a query in Looker.
  * `get_looks`: Searches for saved looks.
  * `run_look`: Runs the query associated with a look.
  * `make_look`: Creates a new look.
  * `get_dashboards`: Searches for saved dashboards.
  * `make_dashboard`: Creates a new dashboard.
  * `add_dashboard_element`: Adds a tile to a dashboard.

## Microsoft SQL Server

* `--prebuilt` value: `mssql`
* **Environment Variables:**
  * `MSSQL_HOST`: The hostname or IP address of the SQL Server instance.
  * `MSSQL_PORT`: The port number for the SQL Server instance.
  * `MSSQL_DATABASE`: The name of the database to connect to.
  * `MSSQL_USER`: The database username.
  * `MSSQL_PASSWORD`: The password for the database user.
* **Permissions:**
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## MySQL

* `--prebuilt` value: `mysql`
* **Environment Variables:**
  * `MYSQL_HOST`: The hostname or IP address of the MySQL server.
  * `MYSQL_PORT`: The port number for the MySQL server.
  * `MYSQL_DATABASE`: The name of the database to connect to.
  * `MYSQL_USER`: The database username.
  * `MYSQL_PASSWORD`: The password for the database user.
* **Permissions:**
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## OceanBase

* `--prebuilt` value: `oceanbase`
* **Environment Variables:**
  * `OCEANBASE_HOST`: The hostname or IP address of the OceanBase server.
  * `OCEANBASE_PORT`: The port number for the OceanBase server.
  * `OCEANBASE_DATABASE`: The name of the database to connect to.
  * `OCEANBASE_USER`: The database username.
  * `OCEANBASE_PASSWORD`: The password for the database user.
* **Permissions:**
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## PostgreSQL

* `--prebuilt` value: `postgres`
* **Environment Variables:**
  * `POSTGRES_HOST`: The hostname or IP address of the PostgreSQL server.
  * `POSTGRES_PORT`: The port number for the PostgreSQL server.
  * `POSTGRES_DATABASE`: The name of the database to connect to.
  * `POSTGRES_USER`: The database username.
  * `POSTGRES_PASSWORD`: The password for the database user.
  * `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db connection string.
* **Permissions:**
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## Spanner (GoogleSQL dialect)

* `--prebuilt` value: `spanner`
* **Environment Variables:**
  * `SPANNER_PROJECT`: The GCP project ID.
  * `SPANNER_INSTANCE`: The Spanner instance ID.
  * `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
  * **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
  * **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Tools:**
  * `execute_sql`: Executes a DML SQL query.
  * `execute_sql_dql`: Executes a DQL SQL query.
  * `list_tables`: Lists tables in the database.

## Spanner (PostgreSQL dialect)

* `--prebuilt` value: `spanner-postgres`
* **Environment Variables:**
  * `SPANNER_PROJECT`: The GCP project ID.
  * `SPANNER_INSTANCE`: The Spanner instance ID.
  * `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
  * **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
  * **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Tools:**
  * `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
  * `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
  * `list_tables`: Lists tables in the database.

## SQLite

* `--prebuilt` value: `sqlite`
* **Environment Variables:**
  * `SQLITE_DATABASE`: The path to the SQLite database file (e.g., `./sample.db`).
* **Permissions:**
  * File system read/write permissions for the specified database file.
* **Tools:**
  * `execute_sql`: Executes a SQL query.
  * `list_tables`: Lists tables in the database.

## Neo4j

* `--prebuilt` value: `neo4j`
* **Environment Variables:**
  * `NEO4J_URI`: The URI of the Neo4j instance (e.g., `bolt://localhost:7687`).
  * `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
  * `NEO4J_USERNAME`: The username for the Neo4j instance.
  * `NEO4J_PASSWORD`: The password for the Neo4j instance.
* **Permissions:**
  * **Database-level permissions** are required to execute Cypher queries.
* **Tools:**
  * `execute_cypher`: Executes a Cypher query.
  * `get_schema`: Retrieves the schema of the Neo4j database.

@@ -3,11 +3,11 @@ title: "AuthServices"
type: docs
weight: 1
description: >
  AuthServices represent services that handle authentication and authorization.
---

AuthServices represent services that handle authentication and authorization. They
can primarily be used by [Tools](../tools/) in two different ways:

- [**Authorized Invocation**][auth-invoke] is when a tool
  is validated by the auth service before the call can be invoked. Toolbox

@@ -68,10 +68,6 @@ parameter when loading tools, or the `add_auth_token_getter`() /

### Specifying tokens during load

#### Python

Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).

{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
import asyncio

@@ -139,69 +135,8 @@ if __name__ == "__main__":
asyncio.run(main()){{< /tab >}}
{{< /tabpane >}}

#### Javascript/Typescript

Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).

```javascript
import { ToolboxClient } from '@toolbox-sdk/core';

async function getAuthToken() {
  // ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
  // This example just returns a placeholder. Replace with your actual token retrieval.
  return "YOUR_ID_TOKEN" // Placeholder
}

const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
const authTool = await client.loadTool("my-tool", {"my_auth_app_1": getAuthToken});
const result = await authTool({param: "value"});
console.log(result);
```

#### Go

Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).

```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
import "fmt"

func getAuthToken() string {
  // ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
  // This example just returns a placeholder. Replace with your actual token retrieval.
  return "YOUR_ID_TOKEN" // Placeholder
}

func main() {
  URL := "http://127.0.0.1:5000"
  client, err := core.NewToolboxClient(URL)
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }
  dynamicTokenSource := core.NewCustomTokenSource(getAuthToken)
  authTool, err := client.LoadTool(
    "my-tool",
    ctx,
    core.WithAuthTokenSource("my_auth_app_1", dynamicTokenSource))
  if err != nil {
    log.Fatalf("Failed to load tool: %v", err)
  }
  inputs := map[string]any{"param": "value"}
  result, err := authTool.Invoke(ctx, inputs)
  if err != nil {
    log.Fatalf("Failed to invoke tool: %v", err)
  }
  fmt.Println(result)
}
```

### Specifying tokens for existing tools

#### Python

Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).

{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
tools = await toolbox.load_toolset()

@@ -247,57 +182,4 @@ authorized_tool = tools[0].add_auth_token_getters({
{{< /tab >}}
{{< /tabpane >}}

#### Javascript/Typescript

Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).

```javascript
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
let tool = await client.loadTool("my-tool")

// for a single token
const authorizedTool = tool.addAuthTokenGetter("my_auth", getAuthToken)

// OR, if multiple tokens are needed
const multiAuthTool = tool.addAuthTokenGetters({
  "my_auth_1": getAuthToken1,
  "my_auth_2": getAuthToken2,
})
```

#### Go

Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).

```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"

func main() {
  URL := "http://127.0.0.1:5000"
  client, err := core.NewToolboxClient(URL)
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }
  tool, err := client.LoadTool("my-tool", ctx)
  if err != nil {
    log.Fatalf("Failed to load tool: %v", err)
  }
  dynamicTokenSource1 := core.NewCustomTokenSource(getAuthToken1)
  dynamicTokenSource2 := core.NewCustomTokenSource(getAuthToken2)

  // For a single token
  authTool, err := tool.ToolFrom(
    core.WithAuthTokenSource("my-auth", dynamicTokenSource1),
  )

  // OR, if multiple tokens are needed
  authTool, err := tool.ToolFrom(
    core.WithAuthTokenSource("my-auth_1", dynamicTokenSource1),
    core.WithAuthTokenSource("my-auth_2", dynamicTokenSource2),
  )
}
```

## Kinds of Auth Services

@@ -3,7 +3,7 @@ title: "Google Sign-In"
type: docs
weight: 1
description: >
  Use Google Sign-In for OAuth 2.0 flow and token lifecycle.
---

## Getting Started

@@ -33,7 +33,7 @@ ID.
When using [Authenticated Parameters][auth-params], any [claim provided by the
id-token][provided-claims] can be used for the parameter.

[auth-params]: ../tools/#authenticated-parameters
[provided-claims]:
    https://developers.google.com/identity/openid-connect/openid-connect#obtaininguserprofileinformation

@@ -1,36 +0,0 @@

---
title: "AlloyDB Admin"
linkTitle: "AlloyDB Admin"
type: docs
weight: 2
description: >
  The "alloydb-admin" source provides a client for the AlloyDB API.
aliases:
- /resources/sources/alloydb-admin
---

## About

The `alloydb-admin` source provides a client to interact with the [Google AlloyDB API](https://cloud.google.com/alloydb/docs/reference/rest). This allows tools to perform administrative tasks on AlloyDB resources, such as managing clusters, instances, and users.

Authentication can be handled in two ways:

1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.

## Example

```yaml
sources:
  my-alloydb-admin:
    kind: alloydb-admin

  my-oauth-alloydb-admin:
    kind: alloydb-admin
    useClientOAuth: true
```

## Reference

| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
@@ -22,28 +22,6 @@ cluster][alloydb-free-trial].
[alloydb-docs]: https://cloud.google.com/alloydb/docs
[alloydb-free-trial]: https://cloud.google.com/alloydb/docs/create-free-trial-cluster

## Available Tools

- [`alloydb-ai-nl`](../tools/alloydbainl/alloydb-ai-nl.md)
  Use natural language queries on AlloyDB, powered by AlloyDB AI.

- [`postgres-sql`](../tools/postgres/postgres-sql.md)
  Execute SQL queries as prepared statements in AlloyDB Postgres.

- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
  Run parameterized SQL statements in AlloyDB Postgres.

- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
  List tables in an AlloyDB for PostgreSQL database.

### Pre-built Configurations

- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
  Connect your IDE to AlloyDB using Toolbox.

- [AlloyDB Admin API using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_admin_mcp/)
  Create your AlloyDB database with MCP Toolbox.

## Requirements

### IAM Permissions

@@ -34,107 +34,40 @@ avoiding full table scans or complex filters.
|
||||
[bigquery-quickstart-cli]: https://cloud.google.com/bigquery/docs/quickstarts/quickstart-command-line
|
||||
[bigquery-googlesql]: https://cloud.google.com/bigquery/docs/reference/standard-sql/
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`bigquery-analyze-contribution`](../tools/bigquery/bigquery-analyze-contribution.md)
|
||||
Performs contribution analysis, also called key driver analysis in BigQuery.
|
||||
|
||||
- [`bigquery-conversational-analytics`](../tools/bigquery/bigquery-conversational-analytics.md)
|
||||
Allows conversational interaction with a BigQuery source.
|
||||
|
||||
- [`bigquery-execute-sql`](../tools/bigquery/bigquery-execute-sql.md)
|
||||
Execute structured queries using parameters.
|
||||
|
||||
- [`bigquery-forecast`](../tools/bigquery/bigquery-forecast.md)
|
||||
Forecasts time series data in BigQuery.
|
||||
|
||||
- [`bigquery-get-dataset-info`](../tools/bigquery/bigquery-get-dataset-info.md)
|
||||
Retrieve metadata for a specific dataset.
|
||||
|
||||
- [`bigquery-get-table-info`](../tools/bigquery/bigquery-get-table-info.md)
|
||||
Retrieve metadata for a specific table.
|
||||
|
||||
- [`bigquery-list-dataset-ids`](../tools/bigquery/bigquery-list-dataset-ids.md)
|
||||
List available dataset IDs.
|
||||
|
||||
- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)
|
||||
List tables in a given dataset.
|
||||
|
||||
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
|
||||
Run SQL queries directly against BigQuery datasets.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
|
||||
Connect your IDE to BigQuery using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
BigQuery uses [Identity and Access Management (IAM)][iam-overview] to control
|
||||
user and group access to BigQuery resources like projects, datasets, and tables.
|
||||
Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize
|
||||
and authenticate when interacting with [BigQuery][bigquery-docs].
|
||||
|
||||
### Authentication via Application Default Credentials (ADC)
|
||||
In addition to [setting the ADC for your server][set-adc], you need to ensure
|
||||
the IAM identity has been given the correct IAM permissions for the queries
|
||||
you intend to run. Common roles include `roles/bigquery.user` (which includes
|
||||
permissions to run jobs and read data) or `roles/bigquery.dataViewer`. See
|
||||
[Introduction to BigQuery IAM][grant-permissions] for more information on
|
||||
applying IAM permissions and roles to an identity.
|
||||
|
||||
By **default**, Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when interacting with [BigQuery][bigquery-docs].
|
||||
|
||||
When using this method, you need to ensure the IAM identity associated with your
|
||||
ADC (such as a service account) has the correct permissions for the queries you
|
||||
intend to run. Common roles include `roles/bigquery.user` (which includes
|
||||
permissions to run jobs and read data) or `roles/bigbigquery.dataViewer`.
|
||||
Follow this [guide][set-adc] to set up your ADC.
|
||||
|
||||
### Authentication via User's OAuth Access Token
|
||||
|
||||
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
|
||||
OAuth access token for authentication. This token is parsed from the
|
||||
`Authorization` header passed in with the tool invocation request. This method
|
||||
allows Toolbox to make queries to [BigQuery][bigquery-docs] on behalf of the
|
||||
client or the end-user.
|
||||
|
||||
When using this on-behalf-of authentication, you must ensure that the
|
||||
identity used has been granted the correct IAM permissions.
|
||||
|
||||
[iam-overview]: <https://cloud.google.com/bigquery/docs/access-control>
|
||||
[adc]: <https://cloud.google.com/docs/authentication#adc>
|
||||
[set-adc]: <https://cloud.google.com/docs/authentication/provide-credentials-adc>
|
||||
[iam-overview]: https://cloud.google.com/bigquery/docs/access-control
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
[grant-permissions]: https://cloud.google.com/bigquery/docs/access-control
|
||||
|
||||
## Example
|
||||
|
||||
Initialize a BigQuery source that uses ADC:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-bigquery-source:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
```
|
||||
|
||||
Initialize a BigQuery source that uses the client's access token:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-bigquery-client-auth-source:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
useClientOAuth: true
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
```
|
||||
|
||||
## Reference

| **field**       | **type** | **required** | **description** |
|-----------------|:--------:|:------------:|-----------------|
| kind            | string   | true         | Must be "bigquery". |
| project         | string   | true         | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
| location        | string   | false        | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
| allowedDatasets | []string | false        | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
| useClientOAuth  | bool     | false        | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------|
| kind      | string   | true         | Must be "bigquery". |
| project   | string   | true         | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| location  | string   | false        | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. The default behavior is for it to be executed in the US multi-region. |

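As an illustration of the `allowedDatasets` field described above, the following sketch restricts a source to two datasets. The source name and dataset IDs are placeholders; the remaining fields mirror the examples earlier on this page:

```yaml
sources:
  my-restricted-bigquery-source:
    kind: "bigquery"
    project: "my-project-id"
    # Only these datasets may be accessed by tools that use this source;
    # dataset-level operations and statements that cannot be statically
    # analyzed are rejected, per the reference table above.
    allowedDatasets:
      - "my_dataset_1"
      - "other_project.my_dataset_2"
```
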
@@ -32,11 +32,6 @@ such as avoiding full table scans or complex filters.
[bigtable-googlesql]:
    https://cloud.google.com/bigtable/docs/googlesql-overview

## Available Tools

- [`bigtable-sql`](../tools/bigtable/bigtable-sql.md)
  Run SQL-like queries over Bigtable rows.

## Requirements

### IAM Permissions

@@ -1,91 +0,0 @@
---
title: "ClickHouse"
type: docs
weight: 1
description: >
  ClickHouse is an open-source, OLAP database.

---

## About

[ClickHouse][clickhouse-docs] is a fast, open-source, column-oriented database
designed for real-time analytical queries.

[clickhouse-docs]: https://clickhouse.com/docs

## Available Tools

- [`clickhouse-execute-sql`](../tools/clickhouse/clickhouse-execute-sql.md)
  Execute parameterized SQL queries in ClickHouse with query logging.

- [`clickhouse-sql`](../tools/clickhouse/clickhouse-sql.md)
  Execute SQL queries as prepared statements in ClickHouse.

## Requirements

### Database User

This source uses standard ClickHouse authentication. You will need to [create a
ClickHouse user][clickhouse-users] (or use [ClickHouse Cloud][clickhouse-cloud])
to connect to the database. The user should have appropriate permissions for the
operations you plan to perform.

[clickhouse-cloud]: https://clickhouse.com/docs/getting-started/quick-start/cloud#connect-with-your-app
[clickhouse-users]: https://clickhouse.com/docs/en/sql-reference/statements/create/user

### Network Access

ClickHouse supports multiple protocols:

- **HTTPS protocol** (default port 8443) - Secure HTTP access (default)
- **HTTP protocol** (default port 8123) - Good for web-based access

## Example

### Secure Connection Example

```yaml
sources:
  secure-clickhouse-source:
    kind: clickhouse
    host: clickhouse.example.com
    port: "8443"
    database: analytics
    user: ${CLICKHOUSE_USER}
    password: ${CLICKHOUSE_PASSWORD}
    protocol: https
    secure: true
```

### HTTP Protocol Example

```yaml
sources:
  http-clickhouse-source:
    kind: clickhouse
    host: localhost
    port: "8123"
    database: logs
    user: ${CLICKHOUSE_USER}
    password: ${CLICKHOUSE_PASSWORD}
    protocol: http
    secure: false
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------|
| kind      | string   | true         | Must be "clickhouse". |
| host      | string   | true         | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port      | string   | true         | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database  | string   | true         | Name of the ClickHouse database to connect to (e.g. "my_database"). |
| user      | string   | true         | Name of the ClickHouse user to connect as (e.g. "analytics_user"). |
| password  | string   | false        | Password of the ClickHouse user (e.g. "my-password"). |
| protocol  | string   | false        | Connection protocol: "https" (default) or "http". |
| secure    | boolean  | false        | Whether to use a secure connection (TLS). Default: false. |

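Because `protocol` and `secure` are optional, a configuration can rely on their documented defaults. The following is a minimal sketch under that assumption, with placeholder connection values:

```yaml
sources:
  minimal-clickhouse-source:
    kind: clickhouse
    host: clickhouse.example.com   # placeholder hostname
    port: "8443"                   # default HTTPS port
    database: analytics
    user: ${CLICKHOUSE_USER}
    password: ${CLICKHOUSE_PASSWORD}
    # protocol and secure are omitted and fall back to the defaults
    # listed in the reference table above.
```
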
@@ -1,36 +0,0 @@
---
title: "Cloud Monitoring"
type: docs
weight: 1
description: >
  A "cloud-monitoring" source provides a client for the Cloud Monitoring API.
aliases:
- /resources/sources/cloud-monitoring
---

## About

The `cloud-monitoring` source provides a client to interact with the [Google Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows tools to access Cloud Monitoring metrics (as in Metrics Explorer) and run PromQL queries.

Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.

## Example

```yaml
sources:
  my-cloud-monitoring:
    kind: cloud-monitoring

  my-oauth-cloud-monitoring:
    kind: cloud-monitoring
    useClientOAuth: true
```

## Reference

| **field**      | **type** | **required** | **description** |
|----------------|:--------:|:------------:|-----------------|
| kind           | string   | true         | Must be "cloud-monitoring". |
| useClientOAuth | boolean  | false        | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |

@@ -1,36 +0,0 @@
---
title: "Cloud SQL Admin"
type: docs
weight: 1
description: >
  A "cloud-sql-admin" source provides a client for the Cloud SQL Admin API.
aliases:
- /resources/sources/cloud-sql-admin
---

## About

The `cloud-sql-admin` source provides a client to interact with the [Google Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This allows tools to perform administrative tasks on Cloud SQL instances, such as creating users and databases.

Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.

## Example

```yaml
sources:
  my-cloud-sql-admin:
    kind: cloud-sql-admin

  my-oauth-cloud-sql-admin:
    kind: cloud-sql-admin
    useClientOAuth: true
```

## Reference

| **field**      | **type** | **required** | **description** |
|----------------|:--------:|:------------:|-----------------|
| kind           | string   | true         | Must be "cloud-sql-admin". |
| useClientOAuth | boolean  | false        | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |

@@ -19,19 +19,6 @@ to a database by following these instructions][csql-mssql-connect].
[csql-mssql-docs]: https://cloud.google.com/sql/docs/sqlserver
[csql-mssql-connect]: https://cloud.google.com/sql/docs/sqlserver/connect-overview

## Available Tools

- [`mssql-sql`](../tools/mssql/mssql-sql.md)
  Execute pre-defined SQL Server queries with placeholder parameters.

- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)
  Run parameterized SQL Server queries in Cloud SQL for SQL Server.

### Pre-built Configurations

- [Cloud SQL for SQL Server using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mssql_mcp/)
  Connect your IDE to Cloud SQL for SQL Server using Toolbox.

## Requirements

### IAM Permissions

@@ -20,22 +20,6 @@ to a database by following these instructions][csql-mysql-quickstart].
[csql-mysql-docs]: https://cloud.google.com/sql/docs/mysql
[csql-mysql-quickstart]: https://cloud.google.com/sql/docs/mysql/connect-instance-local-computer

## Available Tools

- [`mysql-sql`](../tools/mysql/mysql-sql.md)
  Execute pre-defined prepared SQL queries in MySQL.

- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
  Run parameterized SQL queries in Cloud SQL for MySQL.

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a Cloud SQL for MySQL database.

### Pre-built Configurations

- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
  Connect your IDE to Cloud SQL for MySQL using Toolbox.

## Requirements

### IAM Permissions

@@ -20,23 +20,6 @@ to a database by following these instructions][csql-pg-quickstart].
[csql-pg-docs]: https://cloud.google.com/sql/docs/postgres
[csql-pg-quickstart]: https://cloud.google.com/sql/docs/postgres/connect-instance-local-computer

## Available Tools

- [`postgres-sql`](../tools/postgres/postgres-sql.md)
  Execute SQL queries as prepared statements in PostgreSQL.

- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
  Run parameterized SQL statements in PostgreSQL.

- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
  List tables in a PostgreSQL database.

### Pre-built Configurations

- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
  Connect your IDE to Cloud SQL for Postgres using Toolbox.

## Requirements

### IAM Permissions

@@ -11,28 +11,19 @@ description: >
  A `couchbase` source establishes a connection to a Couchbase database cluster,
  allowing tools to execute SQL queries against it.

## Available Tools

- [`couchbase-sql`](../tools/couchbase/couchbase-sql.md)
  Run SQL++ statements on Couchbase with parameterized input.

## Example

```yaml
sources:
  my-couchbase-instance:
    kind: couchbase
    connectionString: couchbase://localhost
    connectionString: couchbase://localhost:8091
    bucket: travel-sample
    scope: inventory
    username: Administrator
    password: password
```

{{< notice note >}}
For more details about alternate addresses and custom ports refer to [Managing Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connections.html).
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description** |

@@ -1,324 +0,0 @@
---
title: "Dataplex"
type: docs
weight: 1
description: >
  Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
---

# Dataplex Source

[Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
powers AI, analytics, and business intelligence at scale.

At the heart of these governance capabilities is a catalog that contains a
centralized inventory of the data assets in your organization. Dataplex
Universal Catalog holds business, technical, and runtime metadata for all of
your data. It helps you discover relationships and semantics in the metadata by
applying artificial intelligence and machine learning.

[dataplex-docs]: https://cloud.google.com/dataplex/docs

## Example

```yaml
sources:
  my-dataplex-source:
    kind: "dataplex"
    project: "my-project-id"
```

## Sample System Prompt

You can use the following system prompt as "Custom Instructions" in your client
application.

```
# Objective
Your primary objective is to help discover, organize and manage metadata related to data assets.

# Tone and Style
1. Adopt the persona of a senior subject matter expert
2. Your communication style must be:
    1. Concise: Always favor brevity.
    2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.
       Example (Incorrect): Hi there! I see that you are looking for...
       Example (Correct): This problem likely stems from...
3. Do not reiterate or summarize the question in the answer.
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
    Example (Correct): "The entry describes..."
    Example (Correct): "According to catalog,..."
    Example (Correct): "Based on the metadata,..."
    Example (Correct): "Based on the search results,..."
5. Do not make assumptions

# Data Model
## Entries
Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:

- A BigQuery table or dataset
- A Cloud Storage bucket or folder
- An on-premises SQL table

## Aspects
While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.

## Aspect Types
Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry.

Examples:
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
- projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
- projects/dataplex-types/locations/global/aspectTypes/bigquery-model
- projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
- projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-table
- projects/dataplex-types/locations/global/aspectTypes/bigquery-view
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
- projects/dataplex-types/locations/global/aspectTypes/contacts
- projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
- projects/dataplex-types/locations/global/aspectTypes/dataform-repository
- projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/aspectTypes/data-product
- projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
- projects/dataplex-types/locations/global/aspectTypes/external-connection
- projects/dataplex-types/locations/global/aspectTypes/overview
- projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
- projects/dataplex-types/locations/global/aspectTypes/schema
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
- projects/dataplex-types/locations/global/aspectTypes/sql-access
- projects/dataplex-types/locations/global/aspectTypes/storage-bucket
- projects/dataplex-types/locations/global/aspectTypes/storage-folder
- projects/dataplex-types/locations/global/aspectTypes/storage
- projects/dataplex-types/locations/global/aspectTypes/usage

## Entry Types
Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type.

Examples:
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/entryTypes/bigquery-connection
- projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
- projects/dataplex-types/locations/global/entryTypes/bigquery-model
- projects/dataplex-types/locations/global/entryTypes/bigquery-routine
- projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-table
- projects/dataplex-types/locations/global/entryTypes/bigquery-view
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
- projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
- projects/dataplex-types/locations/global/entryTypes/dataform-repository
- projects/dataplex-types/locations/global/entryTypes/dataform-workspace
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/entryTypes/pubsub-topic
- projects/dataplex-types/locations/global/entryTypes/storage-bucket
- projects/dataplex-types/locations/global/entryTypes/storage-folder
- projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store

## Entry Groups
Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.

## Entry Links
Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).

# Tool instructions
## Tool: dataplex_search_entries
## General
- Do not try to search within search results on your own.
- Do not fetch multiple pages of results unless explicitly asked.

## Search syntax

### Simple search
In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:

- A substring of a name, display name, or description of a resource
- A substring of the type of a resource
- A substring of a column name (or nested column name) in the schema of a resource
- A substring of a project ID
- A string from an overview description

For example, the predicate foo matches the following resources:
- Resource with the name foo.bar
- Resource with the display name Foo Bar
- Resource with the description This is the foo script
- Resource with the exact type foo
- Column foo_bar in the schema of a resource
- Nested column foo_bar in the schema of a resource
- Project prod-foo-bar
- Resource with an overview containing the word foo

### Qualified predicates
You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
- An equal sign (=) restricts the search to an exact match.
- A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.

Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
- name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
- description:foo selects resources with the foo token in the description, like bar and foo.
- location=foo matches resources in a specified location with foo as the location name.

The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.

Search syntax supports the following qualifiers:
- "name:x" - Matches x as a substring of the resource ID.
- "displayname:x" - Matches x as a substring of the resource display name.
- "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
- "description:x" - Matches x as a token in the resource description.
- "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
- "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
- "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
- "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
- "type=TYPE" - Matches resources of a specific entry type or its type alias.
- "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. It supports the same syntax as the `name` predicate.
- "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
- "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
- "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
- "createtime" - Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01.
- "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.

### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.

aspect:x Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect=x Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect:xOPERATORvalue
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME

The list of supported {OPERATOR}s depends on the type of field in the aspect, as follows:
- String: = (exact match) and : (substring)
- All number types: =, :, <, >, <=, >=, =>, =<
- Enum: =
- Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- Boolean: =

Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
- aspect:example-project.us-central1.employee-info.is-enrolled=true
- aspect:example-project.us-central1.employee=true
- aspect:employee=true

Example: you can use the following filters
- dataplex-types.global.bigquery-table.type={BIGLAKE_TABLE, BIGLAKE_OBJECT_TABLE, EXTERNAL_TABLE, TABLE}
- dataplex-types.global.storage.type={STRUCTURED, UNSTRUCTURED}

### Logical operators
A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
Logical AND and logical OR are supported. For example, foo OR bar.

You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.

### Request
1. Always try to rewrite the prompt using search syntax.

### Response
1. If there are multiple search results found
    1. Present the list of search results
    2. Format the output in a nested ordered list, for example:
       Given
       ```
       {
         results: [
           {
             name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
             entrySource: {
               displayName: "Users"
               description: "Table contains list of users."
               location: "aws-us-east-1"
               system: "BigQuery"
             }
           },
           {
             name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
             entrySource: {
               displayName: "Top customers",
               description: "Table contains list of best customers."
               location: "us-central1"
               system: "BigQuery"
             }
           },
         ]
       }
       ```
       Return output formatted as a markdown nested list:
       ```
       * Users:
         - projectId: test_project
         - location: aws-us-east-1
         - description: Table contains list of users.
       * Top customers:
         - projectId: another_project
         - location: us-central1
         - description: Table contains list of best customers.
       ```
    3. Ask to select one of the presented search results
2. If there is only one search result found
    1. Present the search result immediately.
3. If no search results are found
    1. Explain that no search results were found
    2. Suggest providing a more specific search query.

## Tool: dataplex_lookup_entry
### Request
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the aspect_types parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool.
2. If you do not know the name of the entry, use the `dataplex_search_entries` tool.
### Response
1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
```

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------|
| kind      | string   | true         | Must be "dataplex". |
| project   | string   | true         | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id"). |

@@ -21,11 +21,6 @@ Dgraph Cloud. If you're new to Dgraph, the fastest way to get started is to
[dgraph-docs]: https://dgraph.io/docs
[dgraph-login]: https://cloud.dgraph.io/login

## Available Tools

- [`dgraph-dql`](../tools/dgraph/dgraph-dql.md)
  Run DQL (Dgraph Query Language) queries.

## Requirements

### Database User

@@ -1,59 +0,0 @@
---
title: "Firebird"
type: docs
weight: 1
description: >
  Firebird is a powerful, cross-platform, and open-source relational database.

---

## About

[Firebird][fb-docs] is a relational database management system offering many ANSI SQL standard features that runs on Linux, Windows, and a variety of Unix platforms. It is known for its small footprint, powerful features, and easy maintenance.

[fb-docs]: https://firebirdsql.org/

## Available Tools

- [`firebird-sql`](../tools/firebird/firebird-sql.md)
  Execute SQL queries as prepared statements in Firebird.

- [`firebird-execute-sql`](../tools/firebird/firebird-execute-sql.md)
  Run parameterized SQL statements in Firebird.

## Requirements

### Database User

This source uses standard authentication. You will need to [create a Firebird user][fb-users] to log in to the database with.

[fb-users]: https://firebirdsql.org/refdocs/langrefupd25-sql-create-user.html

## Example

```yaml
sources:
  my_firebird_db:
    kind: firebird
    host: "localhost"
    port: 3050
    database: "/path/to/your/database.fdb"
    user: ${FIREBIRD_USER}
    password: ${FIREBIRD_PASS}
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------|
| kind      | string   | true         | Must be "firebird". |
| host      | string   | true         | IP address to connect to (e.g. "127.0.0.1") |
| port      | string   | true         | Port to connect to (e.g. "3050") |
| database  | string   | true         | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |
| user      | string   | true         | Name of the Firebird user to connect as (e.g. "SYSDBA"). |
| password  | string   | true         | Password of the Firebird user (e.g. "masterkey"). |