Compare commits


1 Commit

Author:  Yuan Teoh
SHA1:    a060660580
Message: chore!: update kind field to type
Date:    2025-09-04 19:16:53 -07:00
519 changed files with 3413 additions and 22175 deletions
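The breaking change behind this commit renames the `kind` field to `type` across source, tool, and auth-service configurations: the Go config structs below change `Kind:` to `Type:`, the contributor docs change `SourceConfigKind()` to `SourceConfigType()`, and error messages now report "is not a valid type of data source". As a hedged illustration only (the YAML spelling is inferred from those struct changes rather than shown verbatim in this diff), a `tools.yaml` entry would migrate roughly like this:

```yaml
# Before this change
tools:
  example_tool:
    kind: postgres-sql
    source: my-pg-instance
    description: some description

# After this change
tools:
  example_tool:
    type: postgres-sql        # `kind` renamed to `type`
    source: my-pg-instance
    description: some description
```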

View File

@@ -62,28 +62,6 @@ steps:
postgressql \
postgresexecutesql
- id: "alloydb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "ALLOYDB_PROJECT=$PROJECT_ID"
- "ALLOYDB_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
- "ALLOYDB_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
- "ALLOYDB_REGION=$_REGION"
secretEnv: ["ALLOYDB_POSTGRES_USER"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB" \
alloydb \
alloydb
- id: "alloydb-pg"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -350,23 +328,23 @@ steps:
mssql \
mssql
# - id: "dgraph"
# name: golang:1
# waitFor: ["compile-test-binary"]
# entrypoint: /bin/bash
# env:
# - "GOPATH=/gopath"
# - "DGRAPH_URL=$_DGRAPHURL"
# volumes:
# - name: "go"
# path: "/gopath"
# args:
# - -c
# - |
# .ci/test_with_coverage.sh \
# "Dgraph" \
# dgraph \
# dgraph
- id: "dgraph"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "DGRAPH_URL=$_DGRAPHURL"
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Dgraph" \
dgraph \
dgraph
- id: "http"
name: golang:1
@@ -553,24 +531,6 @@ steps:
utility \
utility/alloydbwaitforoperation
- id: "cloud-sql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud SQL Wait for Operation" \
cloudsql \
cloudsql
- id: "tidb"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -660,25 +620,6 @@ steps:
trino \
trinosql trinoexecutesql
- id: "yugabytedb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
- "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
- "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
./yugabytedb.test -test.v
availableSecrets:
secretManager:
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -757,13 +698,6 @@ availableSecrets:
env: OCEANBASE_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
env: OCEANBASE_PASSWORD
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
env: YUGABYTEDB_HOST
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
env: YUGABYTEDB_USER
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
env: YUGABYTEDB_PASS
options:
logging: CLOUD_LOGGING_ONLY
@@ -810,6 +744,3 @@ substitutions:
_TRINO_SCHEMA: "default"
_OCEANBASE_PORT: "2883"
_OCEANBASE_DATABASE: "oceanbase"
_YUGABYTEDB_DATABASE: "yugabyte"
_YUGABYTEDB_PORT: "5433"
_YUGABYTEDB_LOADBALANCE: "false"

View File

@@ -1,24 +1,7 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
assign_issues:
- Yuan325
- duwenxin99
- averikitsch
- anubhav756
- dishaprakash
- twishabansal
assign_issues_by:
- labels:
- 'product: bigquery'

.github/labels.yaml vendored
View File

@@ -88,10 +88,6 @@
color: 8befd7
description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'
- name: 'release candidate'
color: 32CD32
description: 'Use label to signal PR should be included in the next release.'
# Product Labels
- name: 'product: bigquery'
color: 5065c7

View File

@@ -20,21 +20,26 @@ extraFiles: [
"README.md",
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/getting-started/introduction/_index.md",
"docs/en/getting-started/local_quickstart.md",
"docs/en/getting-started/local_quickstart_js.md",
"docs/en/getting-started/local_quickstart_go.md",
"docs/en/getting-started/mcp_quickstart/_index.md",
"docs/en/getting-started/quickstart/shared/configure_toolbox.md",
"docs/en/samples/alloydb/_index.md",
"docs/en/samples/alloydb/mcp_quickstart.md",
"docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb",
"docs/en/samples/bigquery/local_quickstart.md",
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
"docs/en/samples/looker/looker_gemini.md",
"docs/en/samples/looker/looker_gemini_oauth/_index.md",
"docs/en/samples/looker/looker_mcp_inspector/_index.md",
"docs/en/samples/looker/looker_mcp_inspector.md",
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
"docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
"docs/en/how-to/connect-ide/bigquery_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/firestore_mcp.md",
"docs/en/how-to/connect-ide/looker_mcp.md",
"docs/en/how-to/connect-ide/mysql_mcp.md",
"docs/en/how-to/connect-ide/mssql_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/neo4j_mcp.md",
"docs/en/how-to/connect-ide/sqlite_mcp.md",
"docs/en/how-to/connect-ide/spanner_mcp.md",
]

View File

@@ -37,7 +37,7 @@ jobs:
runs-on: 'ubuntu-latest'
steps:
- uses: 'actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b' # v7
- uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' # v7
with:
script: |-
// parse test names

View File

@@ -48,7 +48,7 @@ jobs:
git push
- name: Comment
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
script: |
github.rest.issues.createComment({

View File

@@ -90,7 +90,7 @@ jobs:
commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"
- name: Comment
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
script: |
github.rest.issues.createComment({

View File

@@ -36,7 +36,7 @@ jobs:
steps:
- name: Remove PR Label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@@ -41,7 +41,7 @@ jobs:
steps:
- name: Remove PR label
if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@@ -1,33 +1,5 @@
# Changelog
## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)
### ⚠ BREAKING CHANGES
* **bigquery:** Move `useClientOAuth` config from tool to source ([#1279](https://github.com/googleapis/genai-toolbox/issues/1279)) ([8d20a48](https://github.com/googleapis/genai-toolbox/commit/8d20a48f13bcda853d41bdf80a162de12b076d1b))
* **tools/bigquerysql:** remove `useClientOAuth` from tools config ([#1312](https://github.com/googleapis/genai-toolbox/issues/1312))
### Features
* **clickhouse:** Add ClickHouse Source and Tools ([#1088](https://github.com/googleapis/genai-toolbox/issues/1088)) ([75a04a5](https://github.com/googleapis/genai-toolbox/commit/75a04a55dd2259bed72fe95119a7a51a906c0b21))
* **prebuilt/alloydb-postgres:** Support ipType and IAM users ([#1324](https://github.com/googleapis/genai-toolbox/issues/1324)) ([0b2121e](https://github.com/googleapis/genai-toolbox/commit/0b2121ea72eb81348dcd9c740a62ccd32e71fe37))
* **server/mcp:** Support toolbox auth in mcp ([#1140](https://github.com/googleapis/genai-toolbox/issues/1140)) ([ca353e0](https://github.com/googleapis/genai-toolbox/commit/ca353e0b66fedc00e9110df57db18632aef49018))
* **source/mysql:** Support `queryParams` in MySQL source ([#1299](https://github.com/googleapis/genai-toolbox/issues/1299)) ([3ae2526](https://github.com/googleapis/genai-toolbox/commit/3ae2526e0fe36b57b05a9b54f1d99f3fc68d9657))
* **tools/bigquery:** Support end-user credential passthrough on multiple BQ tools ([#1314](https://github.com/googleapis/genai-toolbox/issues/1314)) ([88f4b30](https://github.com/googleapis/genai-toolbox/commit/88f4b3028df3b6a400936cdf8a035bf55021924c))
* **tools/looker:** Add description for looker-get-models tool ([#1266](https://github.com/googleapis/genai-toolbox/issues/1266)) ([89af3a4](https://github.com/googleapis/genai-toolbox/commit/89af3a4ca332f029615b2a739d1f6cd50519638d))
* **tools/looker:** Authenticate via end user credentials ([#1257](https://github.com/googleapis/genai-toolbox/issues/1257)) ([8755e3d](https://github.com/googleapis/genai-toolbox/commit/8755e3db3476abb35629b3cca9c78db7366757a4))
* **tools/looker:** Report field suggestions to agent ([#1267](https://github.com/googleapis/genai-toolbox/issues/1267)) ([2cad82e](https://github.com/googleapis/genai-toolbox/commit/2cad82e5107566dd6c9b75e34e9976af63af0bb5))
### Bug Fixes
* Do not print usage on runtime error ([#1315](https://github.com/googleapis/genai-toolbox/issues/1315)) ([afba7a5](https://github.com/googleapis/genai-toolbox/commit/afba7a57cdd4fe7c1b0741dbf8f8c78b14a68089))
* Update env var to allow empty string ([#1260](https://github.com/googleapis/genai-toolbox/issues/1260)) ([03aa9fa](https://github.com/googleapis/genai-toolbox/commit/03aa9fabacda06f860c9f178485126bddb7d5782))
* **tools/firestore:** Add document/collection path validation ([#1229](https://github.com/googleapis/genai-toolbox/issues/1229)) ([14c2249](https://github.com/googleapis/genai-toolbox/commit/14c224939a2f9bb349fa00a7d5227877198530c2))
* **tools/looker-get-dashboards:** Fix Looker client OAuth check ([#1338](https://github.com/googleapis/genai-toolbox/issues/1338)) ([36225aa](https://github.com/googleapis/genai-toolbox/commit/36225aa6db7f8426ad87930866530fde4e9bf0cd))
* **tools/oceanbase:** Fix encoded text with mysql driver ([#1283](https://github.com/googleapis/genai-toolbox/issues/1283)) ([d16f89f](https://github.com/googleapis/genai-toolbox/commit/d16f89fbb6e49c03998f114ef7dc2b584b5e4967)), closes [#1161](https://github.com/googleapis/genai-toolbox/issues/1161)
## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)
### ⚠ BREAKING CHANGES

View File

@@ -75,7 +75,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement the
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
interface**. This interface requires two methods:
* `SourceConfigKind() string`: Returns a unique string identifier for your
* `SourceConfigType() string`: Returns a unique string identifier for your
data source (e.g., `"newdb"`).
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
Creates a new instance of your data source and establishes a connection to
@@ -83,15 +83,12 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement the
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
interface**. This interface requires one method:
* `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
* `SourceType() string`: Returns the same string identifier as `SourceConfigType()`.
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
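To make the renamed methods concrete, the following is a minimal sketch of what a hypothetical `newdb` source could look like after this change. The struct fields, YAML tags, and file layout are assumptions for illustration only, and the registration call inside `init()` is deliberately omitted because the exact helper is not shown in this diff.

```go
// Package newdb is a hypothetical example source; names and fields here are
// illustrative assumptions, not the project's actual implementation.
package newdb

import (
	"context"

	"github.com/googleapis/genai-toolbox/internal/sources"
	"go.opentelemetry.io/otel/trace"
)

// SourceType is the unique identifier returned by both renamed methods.
const SourceType = "newdb"

// Config implements sources.SourceConfig.
type Config struct {
	Name string `yaml:"name"`
	Type string `yaml:"type"` // previously `kind`
	Host string `yaml:"host"`
}

// SourceConfigType returns the unique type string (formerly SourceConfigKind).
func (c Config) SourceConfigType() string { return SourceType }

// Initialize establishes the connection and returns the live Source.
func (c Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	// Open the connection to the backing database here.
	return &Source{Name: c.Name}, nil
}

// Source implements sources.Source.
type Source struct {
	Name string
}

// SourceType returns the same identifier as SourceConfigType (formerly SourceKind).
func (s *Source) SourceType() string { return SourceType }

func init() {
	// Register the new source with the server here; the registration helper
	// is not shown in this diff, so it is intentionally left out.
}
```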
### Adding a New Tool
> [!NOTE]
> Please follow the tool naming convention detailed [here](./DEVELOPER.md#tool-naming-conventions).
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
@@ -103,7 +100,7 @@ tools.
* **Implement the
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
interface**. This interface requires one method:
* `ToolConfigKind() string`: Returns a unique string identifier for your tool
* `ToolConfigType() string`: Returns a unique string identifier for your tool
(e.g., `"newdb"`).
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
instance of your tool and validates that it can connect to the specified
@@ -218,7 +215,7 @@ resources.
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |
Pull requests should always add scope whenever possible. The scope is
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
formatted as `<scope-type>/<scope-type>` (e.g., `sources/postgres`, or
`tools/mssql-sql`).
Ideally, **each PR covers only one scope**, if this is

View File

@@ -44,47 +44,6 @@ Before you begin, ensure you have the following:
curl http://127.0.0.1:5000
```
### Tool Naming Conventions
This section details the purpose and conventions for MCP Toolbox's tool naming
properties, **tool name** and **tool kind**.
```
cancel_hotel: <- tool name
kind: postgres-sql <- tool kind
source: my_pg_source
```
#### Tool Name
Tool name is the identifier used by a Large Language Model (LLM) to invoke a
specific tool.
* Custom tools: The user can define any name they want. The below guidelines
do not apply.
* Pre-built tools: The tool name is predefined and cannot be changed. It
should follow the guidelines.
The following guidelines apply to tool names:
* Should use underscores over hyphens (e.g., `list_collections` instead of
`list-collections`).
* Should not have the product name in the name (e.g., `list_collections` instead
of `firestore_list_collections`).
* Superficial changes are NOT considered breaking (e.g., changing a tool name).
* Non-superficial changes MAY be considered breaking (e.g., adding new parameters
to a function) until they can be validated through extensive testing to ensure
they do not negatively impact the agent's performance.
#### Tool Kind
Tool kind serves as a category or type that a user can assign to a tool.
The following guidelines apply to tool kinds:
* Should use hyphens over underscores (e.g., `firestore-list-collections` instead of
`firestore_list_collections`).
* Should use the product name in the name (e.g., `firestore-list-collections` over
`list-collections`).
* Changes to tool kind are breaking changes and should be avoided.
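Putting these guidelines together, a conforming entry pairs an underscore-separated tool name with a hyphenated, product-prefixed tool kind; the source name below is a hypothetical placeholder:

```yaml
list_collections:                    # tool name: underscores, no product prefix
  kind: firestore-list-collections   # tool kind: hyphens, includes the product name
  source: my-firestore-source
```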
## Testing
### Infrastructure

View File

@@ -117,7 +117,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -130,7 +130,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -154,7 +154,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```
<!-- {x-release-please-end} -->
@@ -726,7 +726,7 @@ For more details on configuring different types of sources, see the
### Tools
The `tools` section of a `tools.yaml` defines the actions an agent can take: what
kind of tool it is, which source(s) it affects, what parameters it uses, etc.
type of tool it is, which source(s) it affects, what parameters it uses, etc.
```yaml
tools:

View File

@@ -42,14 +42,7 @@ import (
"github.com/googleapis/genai-toolbox/internal/util"
// Import tool packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
@@ -60,18 +53,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
@@ -84,7 +66,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
@@ -116,7 +97,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
@@ -124,14 +104,11 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
@@ -139,17 +116,13 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
"github.com/spf13/cobra"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
@@ -172,7 +145,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
)
var (

View File

@@ -496,7 +496,7 @@ func TestParseToolFile(t *testing.T) {
Sources: server.SourceConfigs{
"my-pg-instance": cloudsqlpgsrc.Config{
Name: "my-pg-instance",
Kind: cloudsqlpgsrc.SourceKind,
Type: cloudsqlpgsrc.SourceType,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
@@ -509,7 +509,7 @@ func TestParseToolFile(t *testing.T) {
Tools: server.ToolConfigs{
"example_tool": postgressql.Config{
Name: "example_tool",
Kind: "postgres-sql",
Type: "postgres-sql",
Source: "my-pg-instance",
Description: "some description",
Statement: "SELECT * FROM SQL_STATEMENT;\n",
@@ -615,7 +615,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
Sources: server.SourceConfigs{
"my-pg-instance": cloudsqlpgsrc.Config{
Name: "my-pg-instance",
Kind: cloudsqlpgsrc.SourceKind,
Type: cloudsqlpgsrc.SourceType,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
@@ -628,19 +628,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
AuthServices: server.AuthServiceConfigs{
"my-google-service": google.Config{
Name: "my-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "my-client-id",
},
"other-google-service": google.Config{
Name: "other-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "other-client-id",
},
},
Tools: server.ToolConfigs{
"example_tool": postgressql.Config{
Name: "example_tool",
Kind: "postgres-sql",
Type: "postgres-sql",
Source: "my-pg-instance",
Description: "some description",
Statement: "SELECT * FROM SQL_STATEMENT;\n",
@@ -714,7 +714,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
Sources: server.SourceConfigs{
"my-pg-instance": cloudsqlpgsrc.Config{
Name: "my-pg-instance",
Kind: cloudsqlpgsrc.SourceKind,
Type: cloudsqlpgsrc.SourceType,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
@@ -727,19 +727,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
AuthSources: server.AuthServiceConfigs{
"my-google-service": google.Config{
Name: "my-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "my-client-id",
},
"other-google-service": google.Config{
Name: "other-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "other-client-id",
},
},
Tools: server.ToolConfigs{
"example_tool": postgressql.Config{
Name: "example_tool",
Kind: "postgres-sql",
Type: "postgres-sql",
Source: "my-pg-instance",
Description: "some description",
Statement: "SELECT * FROM SQL_STATEMENT;\n",
@@ -815,7 +815,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
Sources: server.SourceConfigs{
"my-pg-instance": cloudsqlpgsrc.Config{
Name: "my-pg-instance",
Kind: cloudsqlpgsrc.SourceKind,
Type: cloudsqlpgsrc.SourceType,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
@@ -828,19 +828,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
AuthServices: server.AuthServiceConfigs{
"my-google-service": google.Config{
Name: "my-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "my-client-id",
},
"other-google-service": google.Config{
Name: "other-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "other-client-id",
},
},
Tools: server.ToolConfigs{
"example_tool": postgressql.Config{
Name: "example_tool",
Kind: "postgres-sql",
Type: "postgres-sql",
Source: "my-pg-instance",
Description: "some description",
Statement: "SELECT * FROM SQL_STATEMENT;\n",
@@ -972,7 +972,7 @@ func TestEnvVarReplacement(t *testing.T) {
Sources: server.SourceConfigs{
"my-http-instance": httpsrc.Config{
Name: "my-http-instance",
Kind: httpsrc.SourceKind,
Type: httpsrc.SourceType,
BaseURL: "http://test_server/",
Timeout: "10s",
DefaultHeaders: map[string]string{"Authorization": "ACTUAL_HEADER"},
@@ -982,19 +982,19 @@ func TestEnvVarReplacement(t *testing.T) {
AuthServices: server.AuthServiceConfigs{
"my-google-service": google.Config{
Name: "my-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "ACTUAL_CLIENT_ID",
},
"other-google-service": google.Config{
Name: "other-google-service",
Kind: google.AuthServiceKind,
Type: google.AuthServiceType,
ClientID: "ACTUAL_CLIENT_ID_2",
},
},
Tools: server.ToolConfigs{
"example_tool": http.Config{
Name: "example_tool",
Kind: "http",
Type: "http",
Source: "my-instance",
Method: "GET",
Path: "search?name=alice&pet=cat",
@@ -1234,11 +1234,8 @@ func TestPrebuiltTools(t *testing.T) {
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
clickhouse_config, _ := prebuiltconfigs.Get("clickhouse")
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
cloudsqlpg_admin_config, _ := prebuiltconfigs.Get("cloud-sql-postgres-admin")
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
cloudsqlmysql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mysql-admin")
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
cloudsqlmssql_admin_config, _ := prebuiltconfigs.Get("cloud-sql-mssql-admin")
dataplex_config, _ := prebuiltconfigs.Get("dataplex")
firestoreconfig, _ := prebuiltconfigs.Get("firestore")
mysql_config, _ := prebuiltconfigs.Get("mysql")
@@ -1247,8 +1244,6 @@ func TestPrebuiltTools(t *testing.T) {
postgresconfig, _ := prebuiltconfigs.Get("postgres")
spanner_config, _ := prebuiltconfigs.Get("spanner")
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
sqlite_config, _ := prebuiltconfigs.Get("sqlite")
neo4jconfig, _ := prebuiltconfigs.Get("neo4j")
// Set environment variables
t.Setenv("API_KEY", "your_api_key")
@@ -1323,13 +1318,6 @@ func TestPrebuiltTools(t *testing.T) {
t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
t.Setenv("LOOKER_VERIFY_SSL", "true")
t.Setenv("SQLITE_DATABASE", "test.db")
t.Setenv("NEO4J_URI", "bolt://localhost:7687")
t.Setenv("NEO4J_DATABASE", "neo4j")
t.Setenv("NEO4J_USERNAME", "your_neo4j_user")
t.Setenv("NEO4J_PASSWORD", "your_neo4j_password")
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
@@ -1343,39 +1331,9 @@ func TestPrebuiltTools(t *testing.T) {
name: "alloydb postgres admin prebuilt tools",
in: alloydb_admin_config,
wantToolset: server.ToolsetConfigs{
"alloydb_postgres_admin_tools": tools.ToolsetConfig{
Name: "alloydb_postgres_admin_tools",
ToolNames: []string{"create_cluster", "wait_for_operation", "create_instance", "list_clusters", "list_instances", "list_users", "create_user", "get_cluster", "get_instance", "get_user"},
},
},
},
{
name: "cloudsql pg admin prebuilt tools",
in: cloudsqlpg_admin_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
},
},
},
{
name: "cloudsql mysql admin prebuilt tools",
in: cloudsqlmysql_admin_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
},
},
},
{
name: "cloudsql mssql admin prebuilt tools",
in: cloudsqlmssql_admin_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
"alloydb-postgres-admin-tools": tools.ToolsetConfig{
Name: "alloydb-postgres-admin-tools",
ToolNames: []string{"alloydb-create-cluster", "alloydb-operations-get", "alloydb-create-instance", "alloydb-list-clusters", "alloydb-list-instances", "alloydb-list-users", "alloydb-create-user"},
},
},
},
@@ -1383,8 +1341,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "alloydb prebuilt tools",
in: alloydb_config,
wantToolset: server.ToolsetConfigs{
"alloydb_postgres_database_tools": tools.ToolsetConfig{
Name: "alloydb_postgres_database_tools",
"alloydb-postgres-database-tools": tools.ToolsetConfig{
Name: "alloydb-postgres-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1393,9 +1351,9 @@ func TestPrebuiltTools(t *testing.T) {
name: "bigquery prebuilt tools",
in: bigquery_config,
wantToolset: server.ToolsetConfigs{
"bigquery_database_tools": tools.ToolsetConfig{
Name: "bigquery_database_tools",
ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
"bigquery-database-tools": tools.ToolsetConfig{
Name: "bigquery-database-tools",
ToolNames: []string{"ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
},
},
},
@@ -1403,9 +1361,9 @@ func TestPrebuiltTools(t *testing.T) {
name: "clickhouse prebuilt tools",
in: clickhouse_config,
wantToolset: server.ToolsetConfigs{
"clickhouse_database_tools": tools.ToolsetConfig{
Name: "clickhouse_database_tools",
ToolNames: []string{"execute_sql", "list_databases"},
"clickhouse-database-tools": tools.ToolsetConfig{
Name: "clickhouse-database-tools",
ToolNames: []string{"execute_sql"},
},
},
},
@@ -1413,8 +1371,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "cloudsqlpg prebuilt tools",
in: cloudsqlpg_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_database_tools",
"cloud-sql-postgres-database-tools": tools.ToolsetConfig{
Name: "cloud-sql-postgres-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1423,8 +1381,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "cloudsqlmysql prebuilt tools",
in: cloudsqlmysql_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_database_tools",
"cloud-sql-mysql-database-tools": tools.ToolsetConfig{
Name: "cloud-sql-mysql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1433,8 +1391,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "cloudsqlmssql prebuilt tools",
in: cloudsqlmssql_config,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_database_tools",
"cloud-sql-mssql-database-tools": tools.ToolsetConfig{
Name: "cloud-sql-mssql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1443,9 +1401,9 @@ func TestPrebuiltTools(t *testing.T) {
name: "dataplex prebuilt tools",
in: dataplex_config,
wantToolset: server.ToolsetConfigs{
"dataplex_tools": tools.ToolsetConfig{
Name: "dataplex_tools",
ToolNames: []string{"search_entries", "lookup_entry", "search_aspect_types"},
"dataplex-tools": tools.ToolsetConfig{
Name: "dataplex-tools",
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry", "dataplex_search_aspect_types"},
},
},
},
@@ -1453,9 +1411,9 @@ func TestPrebuiltTools(t *testing.T) {
name: "firestore prebuilt tools",
in: firestoreconfig,
wantToolset: server.ToolsetConfigs{
"firestore_database_tools": tools.ToolsetConfig{
Name: "firestore_database_tools",
ToolNames: []string{"get_documents", "add_documents", "update_document", "list_collections", "delete_documents", "query_collection", "get_rules", "validate_rules"},
"firestore-database-tools": tools.ToolsetConfig{
Name: "firestore-database-tools",
ToolNames: []string{"firestore-get-documents", "firestore-add-documents", "firestore-update-document", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
},
},
},
@@ -1463,8 +1421,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "mysql prebuilt tools",
in: mysql_config,
wantToolset: server.ToolsetConfigs{
"mysql_database_tools": tools.ToolsetConfig{
Name: "mysql_database_tools",
"mysql-database-tools": tools.ToolsetConfig{
Name: "mysql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1473,8 +1431,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "mssql prebuilt tools",
in: mssql_config,
wantToolset: server.ToolsetConfigs{
"mssql_database_tools": tools.ToolsetConfig{
Name: "mssql_database_tools",
"mssql-database-tools": tools.ToolsetConfig{
Name: "mssql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1483,8 +1441,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "looker prebuilt tools",
in: looker_config,
wantToolset: server.ToolsetConfigs{
"looker_tools": tools.ToolsetConfig{
Name: "looker_tools",
"looker-tools": tools.ToolsetConfig{
Name: "looker-tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element"},
},
},
@@ -1493,8 +1451,8 @@ func TestPrebuiltTools(t *testing.T) {
name: "postgres prebuilt tools",
in: postgresconfig,
wantToolset: server.ToolsetConfigs{
"postgres_database_tools": tools.ToolsetConfig{
Name: "postgres_database_tools",
"postgres-database-tools": tools.ToolsetConfig{
Name: "postgres-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
@@ -1513,32 +1471,12 @@ func TestPrebuiltTools(t *testing.T) {
name: "spanner pg prebuilt tools",
in: spannerpg_config,
wantToolset: server.ToolsetConfigs{
"spanner_postgres_database_tools": tools.ToolsetConfig{
Name: "spanner_postgres_database_tools",
"spanner-postgres-database-tools": tools.ToolsetConfig{
Name: "spanner-postgres-database-tools",
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
},
},
},
{
name: "sqlite prebuilt tools",
in: sqlite_config,
wantToolset: server.ToolsetConfigs{
"sqlite_database_tools": tools.ToolsetConfig{
Name: "sqlite_database_tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
},
{
name: "neo4j prebuilt tools",
in: neo4jconfig,
wantToolset: server.ToolsetConfigs{
"neo4j_database_tools": tools.ToolsetConfig{
Name: "neo4j_database_tools",
ToolNames: []string{"execute_cypher", "get_schema"},
},
},
},
}
for _, tc := range tcs {

View File

@@ -1 +1 @@
0.14.0
0.13.0

View File

@@ -64,7 +64,7 @@ The structured logging outputs log as JSON:
"timestamp":"2024-11-04T16:45:11.987299-08:00",
"severity":"ERROR",
"logging.googleapis.com/sourceLocation":{...},
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid kind of data source"
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid type of data source"
}
```

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.14.0\" # x-release-please-version\n",
"version = \"0.13.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -51,7 +51,7 @@ For more details on configuring different types of sources, see the
### Tools
The `tools` section of your `tools.yaml` defines the actions your agent can
take: what kind of tool it is, which source(s) it affects, what parameters it
take: what type of tool it is, which source(s) it affects, what parameters it
uses, etc.
```yaml

View File

@@ -86,7 +86,7 @@ To install Toolbox as a binary:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -115,7 +115,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```
{{% /tab %}}

View File

@@ -17,11 +17,6 @@ This guide assumes you have already done the following:
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv]).
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}

View File

@@ -49,28 +49,614 @@ from Toolbox.
{{< tabpane persist=header >}}
{{< tab header="LangChain Go" lang="go" >}}
{{< include "quickstart/go/langchain/quickstart.go" >}}
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/googleai"
)
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
// Fetch the tool's input schema
inputschema, err := toolboxTool.InputSchema()
if err != nil {
return llms.Tool{}
}
var paramsSchema map[string]any
_ = json.Unmarshal(inputschema, &paramsSchema)
// Convert into LangChain's llms.Tool
return llms.Tool{
Type: "function",
Function: &llms.FunctionDefinition{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
genaiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
ctx := context.Background()
// Initialize the Google AI client (LLM).
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
if err != nil {
log.Fatalf("Failed to create Google AI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
langchainTools := make([]llms.Tool, len(tools))
// Convert the loaded ToolboxTools into the format LangChainGo requires.
for i, tool := range tools {
langchainTools[i] = ConvertToLangchainTool(tool)
toolsMap[tool.Name()] = tool
}
// Start the conversation history.
messageHistory := []llms.MessageContent{
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
}
for _, query := range queries {
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
// Make the first call to the LLM, making it aware of the tool.
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
if err != nil {
log.Fatalf("LLM call failed: %v", err)
}
respChoice := resp.Choices[0]
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
for _, tc := range respChoice.ToolCalls {
assistantResponse.Parts = append(assistantResponse.Parts, tc)
}
messageHistory = append(messageHistory, assistantResponse)
// Process each tool call requested by the model.
for _, tc := range respChoice.ToolCalls {
toolName := tc.FunctionCall.Name
tool := toolsMap[toolName]
var args map[string]any
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
}
toolResult, err := tool.Invoke(ctx, args)
if err != nil {
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
}
if toolResult == "" || toolResult == nil {
toolResult = "Operation completed successfully with no specific return value."
}
// Create the tool call response message and add it to the history.
toolResponse := llms.MessageContent{
Role: llms.ChatMessageTypeTool,
Parts: []llms.ContentPart{
llms.ToolCallResponse{
Name: toolName,
Content: fmt.Sprintf("%v", toolResult),
},
},
}
messageHistory = append(messageHistory, toolResponse)
}
finalResp, err := llm.GenerateContent(ctx, messageHistory)
if err != nil {
log.Fatalf("Final LLM call failed after tool execution: %v", err)
}
// Add the final textual response from the LLM to the history
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
fmt.Println(finalResp.Choices[0].Content)
}
}
{{< /tab >}}
{{< tab header="Genkit Go" lang="go" >}}
{{< include "quickstart/go/genkit/quickstart.go" >}}
package main
import (
"context"
"fmt"
"log"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/firebase/genkit/go/plugins/googlegenai"
)
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
ctx := context.Background()
// Create Toolbox Client
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
// Initialize Genkit
g, err := genkit.Init(ctx,
genkit.WithPlugins(&googlegenai.GoogleAI{}),
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
)
if err != nil {
log.Fatalf("Failed to init genkit: %v\n", err)
}
// Create a conversation history
conversationHistory := []*ai.Message{
ai.NewSystemTextMessage(systemPrompt),
}
// Convert your tool to a Genkit tool.
genkitTools := make([]ai.Tool, len(tools))
for i, tool := range tools {
newTool, err := tbgenkit.ToGenkitTool(tool, g)
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
}
genkitTools[i] = newTool
}
toolRefs := make([]ai.ToolRef, len(genkitTools))
for i, tool := range genkitTools {
toolRefs[i] = tool
}
for _, query := range queries {
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
response, err := genkit.Generate(ctx, g,
ai.WithMessages(conversationHistory...),
ai.WithTools(toolRefs...),
ai.WithReturnToolRequests(true),
)
if err != nil {
log.Fatalf("%v\n", err)
}
conversationHistory = append(conversationHistory, response.Message)
parts := []*ai.Part{}
for _, req := range response.ToolRequests() {
tool := genkit.LookupTool(g, req.Name)
if tool == nil {
log.Fatalf("tool %q not found", req.Name)
}
output, err := tool.RunRaw(ctx, req.Input)
if err != nil {
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
}
parts = append(parts,
ai.NewToolResponsePart(&ai.ToolResponse{
Name: req.Name,
Ref: req.Ref,
Output: output,
}))
}
if len(parts) > 0 {
resp, err := genkit.Generate(ctx, g,
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
ai.WithTools(toolRefs...),
)
if err != nil {
log.Fatal(err)
}
fmt.Println("\n", resp.Text())
conversationHistory = append(conversationHistory, resp.Message)
} else {
fmt.Println("\n", response.Text())
}
}
}
{{< /tab >}}
{{< tab header="Go GenAI" lang="go" >}}
{{< include "quickstart/go/genAI/quickstart.go" >}}
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"google.golang.org/genai"
)
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
inputschema, err := toolboxTool.InputSchema()
if err != nil {
return &genai.Tool{}
}
var paramsSchema *genai.Schema
_ = json.Unmarshal(inputschema, &paramsSchema)
// First, create the function declaration.
funcDeclaration := &genai.FunctionDeclaration{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
}
// Then, wrap the function declaration in a genai.Tool struct.
return &genai.Tool{
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
}
}
func printResponse(resp *genai.GenerateContentResponse) {
for _, cand := range resp.Candidates {
if cand.Content != nil {
for _, part := range cand.Content.Parts {
fmt.Println(part.Text)
}
}
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
apiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
// Initialize the Google GenAI client using the explicit ClientConfig.
client, err := genai.NewClient(ctx, &genai.ClientConfig{
APIKey: apiKey,
})
if err != nil {
log.Fatalf("Failed to create Google GenAI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
genAITools := make([]*genai.Tool, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
genAITools[i] = ConvertToGenaiTool(tool)
toolsMap[tool.Name()] = tool
}
// Set up the generative model with the available tool.
modelName := "gemini-2.0-flash"
// Create the initial content prompt for the model.
messageHistory := []*genai.Content{
genai.NewContentFromText(systemPrompt, genai.RoleUser),
}
config := &genai.GenerateContentConfig{
Tools: genAITools,
ToolConfig: &genai.ToolConfig{
FunctionCallingConfig: &genai.FunctionCallingConfig{
Mode: genai.FunctionCallingConfigModeAny,
},
},
}
for _, query := range queries {
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
if err != nil {
log.Fatalf("LLM call failed for query '%s': %v", query, err)
}
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
}
functionCalls := genContentResp.FunctionCalls()
toolResponseParts := []*genai.Part{}
for _, fc := range functionCalls {
toolToInvoke, found := toolsMap[fc.Name]
if !found {
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
}
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
if invokeErr != nil {
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
}
// Enhanced Tool Result Handling (retained to prevent nil issues)
toolResultString := ""
if toolResult != nil {
jsonBytes, marshalErr := json.Marshal(toolResult)
if marshalErr == nil {
toolResultString = string(jsonBytes)
} else {
toolResultString = fmt.Sprintf("%v", toolResult)
}
}
responseMap := map[string]any{"result": toolResultString}
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
}
// Add all accumulated tool responses for this turn to the message history.
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
messageHistory = append(messageHistory, toolResponseContent)
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
if err != nil {
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
}
printResponse(finalResponse)
// Add the final textual response from the LLM to the history
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
}
}
}
{{< /tab >}}
{{< tab header="OpenAI Go" lang="go" >}}
{{< include "quickstart/go/openAI/quickstart.go" >}}
package main
import (
"context"
"encoding/json"
"log"
"fmt
"github.com/googleapis/mcp-toolbox-sdk-go/core"
openai "github.com/openai/openai-go"
)
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
// Get the input schema
jsonSchemaBytes, err := toolboxTool.InputSchema()
if err != nil {
return openai.ChatCompletionToolParam{}
}
// Unmarshal the JSON bytes into FunctionParameters
var paramsSchema openai.FunctionParameters
if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
return openai.ChatCompletionToolParam{}
}
// Create and return the final tool parameter struct.
return openai.ChatCompletionToolParam{
Function: openai.FunctionDefinitionParam{
Name: toolboxTool.Name(),
Description: openai.String(toolboxTool.Description()),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
toolboxURL := "http://localhost:5000"
openAIClient := openai.NewClient()
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
openAITools[i] = ConvertToOpenAITool(tool)
// Add tool to a map for lookup later
toolsMap[tool.Name()] = tool
}
params := openai.ChatCompletionNewParams{
Messages: []openai.ChatCompletionMessageParamUnion{
openai.SystemMessage(systemPrompt),
},
Tools: openAITools,
Seed: openai.Int(0),
Model: openai.ChatModelGPT4o,
}
for _, query := range queries {
params.Messages = append(params.Messages, openai.UserMessage(query))
// Make initial chat completion request
completion, err := openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
toolCalls := completion.Choices[0].Message.ToolCalls
// Return early if there are no tool calls
if len(toolCalls) == 0 {
log.Println("No function call")
}
// If there was a function call, continue the conversation
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
for _, toolCall := range toolCalls {
toolName := toolCall.Function.Name
toolToInvoke := toolsMap[toolName]
var args map[string]any
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
if err != nil {
panic(err)
}
result, err := toolToInvoke.Invoke(ctx, args)
if err != nil {
log.Fatal("Could not invoke tool", err)
}
params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
}
completion, err = openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
params.Messages = append(params.Messages, openai.AssistantMessage(query))
fmt.Println("\n", completion.Choices[0].Message.Content)
}
}
{{< /tab >}}
{{< /tabpane >}}
View File
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
View File
@@ -1,37 +0,0 @@
module genai-quickstart
go 1.24.6
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
google.golang.org/genai v1.24.0
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
)
View File
@@ -1,174 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"google.golang.org/genai"
)
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
inputschema, err := toolboxTool.InputSchema()
if err != nil {
return &genai.Tool{}
}
var paramsSchema *genai.Schema
_ = json.Unmarshal(inputschema, &paramsSchema)
// First, create the function declaration.
funcDeclaration := &genai.FunctionDeclaration{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
}
// Then, wrap the function declaration in a genai.Tool struct.
return &genai.Tool{
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
}
}
func printResponse(resp *genai.GenerateContentResponse) {
for _, cand := range resp.Candidates {
if cand.Content != nil {
for _, part := range cand.Content.Parts {
fmt.Println(part.Text)
}
}
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
apiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
// Initialize the Google GenAI client using the explicit ClientConfig.
client, err := genai.NewClient(ctx, &genai.ClientConfig{
APIKey: apiKey,
})
if err != nil {
log.Fatalf("Failed to create Google GenAI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
genAITools := make([]*genai.Tool, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
genAITools[i] = ConvertToGenaiTool(tool)
toolsMap[tool.Name()] = tool
}
// Set up the generative model with the available tool.
modelName := "gemini-2.0-flash"
// Create the initial content prompt for the model.
messageHistory := []*genai.Content{
genai.NewContentFromText(systemPrompt, genai.RoleUser),
}
config := &genai.GenerateContentConfig{
Tools: genAITools,
ToolConfig: &genai.ToolConfig{
FunctionCallingConfig: &genai.FunctionCallingConfig{
Mode: genai.FunctionCallingConfigModeAny,
},
},
}
for _, query := range queries {
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
if err != nil {
log.Fatalf("LLM call failed for query '%s': %v", query, err)
}
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
}
functionCalls := genContentResp.FunctionCalls()
toolResponseParts := []*genai.Part{}
for _, fc := range functionCalls {
toolToInvoke, found := toolsMap[fc.Name]
if !found {
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
}
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
if invokeErr != nil {
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
}
// Enhanced Tool Result Handling (retained to prevent nil issues)
toolResultString := ""
if toolResult != nil {
jsonBytes, marshalErr := json.Marshal(toolResult)
if marshalErr == nil {
toolResultString = string(jsonBytes)
} else {
toolResultString = fmt.Sprintf("%v", toolResult)
}
}
responseMap := map[string]any{"result": toolResultString}
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
}
// Add all accumulated tool responses for this turn to the message history.
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
messageHistory = append(messageHistory, toolResponseContent)
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
if err != nil {
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
}
printResponse(finalResponse)
// Add the final textual response from the LLM to the history
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
}
}
}
View File
@@ -1,52 +0,0 @@
module genkit-quickstart
go 1.24.6
require (
github.com/firebase/genkit/go v0.6.2
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/goccy/go-yaml v1.17.1 // indirect
github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/invopop/jsonschema v0.13.0 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a // indirect
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/sdk v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genai v1.11.1 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
View File
@@ -1,129 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/firebase/genkit/go/plugins/googlegenai"
)
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
ctx := context.Background()
// Create Toolbox Client
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
// Initialize Genkit
g, err := genkit.Init(ctx,
genkit.WithPlugins(&googlegenai.GoogleAI{}),
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
)
if err != nil {
log.Fatalf("Failed to init genkit: %v\n", err)
}
// Create a conversation history
conversationHistory := []*ai.Message{
ai.NewSystemTextMessage(systemPrompt),
}
// Convert your tool to a Genkit tool.
genkitTools := make([]ai.Tool, len(tools))
for i, tool := range tools {
newTool, err := tbgenkit.ToGenkitTool(tool, g)
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
}
genkitTools[i] = newTool
}
toolRefs := make([]ai.ToolRef, len(genkitTools))
for i, tool := range genkitTools {
toolRefs[i] = tool
}
for _, query := range queries {
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
response, err := genkit.Generate(ctx, g,
ai.WithMessages(conversationHistory...),
ai.WithTools(toolRefs...),
ai.WithReturnToolRequests(true),
)
if err != nil {
log.Fatalf("%v\n", err)
}
conversationHistory = append(conversationHistory, response.Message)
parts := []*ai.Part{}
for _, req := range response.ToolRequests() {
tool := genkit.LookupTool(g, req.Name)
if tool == nil {
log.Fatalf("tool %q not found", req.Name)
}
output, err := tool.RunRaw(ctx, req.Input)
if err != nil {
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
}
parts = append(parts,
ai.NewToolResponsePart(&ai.ToolResponse{
Name: req.Name,
Ref: req.Ref,
Output: output,
}))
}
if len(parts) > 0 {
resp, err := genkit.Generate(ctx, g,
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
ai.WithTools(toolRefs...),
)
if err != nil {
log.Fatal(err)
}
fmt.Println("\n", resp.Text())
conversationHistory = append(conversationHistory, resp.Message)
} else {
fmt.Println("\n", response.Text())
}
}
}
View File
@@ -1,49 +0,0 @@
module langchan-quickstart
go 1.24.6
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
github.com/tmc/langchaingo v0.1.13
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go/ai v0.7.0 // indirect
cloud.google.com/go/aiplatform v1.85.0 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
cloud.google.com/go/iam v1.5.2 // indirect
cloud.google.com/go/longrunning v0.6.7 // indirect
cloud.google.com/go/vertexai v0.12.0 // indirect
github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/generative-ai-go v0.15.1 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/pkoukk/tiktoken-go v0.1.6 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
golang.org/x/time v0.12.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
)
View File
@@ -1,149 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/googleai"
)
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
// Fetch the tool's input schema
inputschema, err := toolboxTool.InputSchema()
if err != nil {
return llms.Tool{}
}
var paramsSchema map[string]any
_ = json.Unmarshal(inputschema, &paramsSchema)
// Convert into LangChain's llms.Tool
return llms.Tool{
Type: "function",
Function: &llms.FunctionDefinition{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
genaiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
ctx := context.Background()
// Initialize the Google AI client (LLM).
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
if err != nil {
log.Fatalf("Failed to create Google AI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
langchainTools := make([]llms.Tool, len(tools))
// Convert the loaded ToolboxTools into the format LangChainGo requires.
for i, tool := range tools {
langchainTools[i] = ConvertToLangchainTool(tool)
toolsMap[tool.Name()] = tool
}
// Start the conversation history.
messageHistory := []llms.MessageContent{
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
}
for _, query := range queries {
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
// Make the first call to the LLM, making it aware of the tool.
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
if err != nil {
log.Fatalf("LLM call failed: %v", err)
}
respChoice := resp.Choices[0]
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
for _, tc := range respChoice.ToolCalls {
assistantResponse.Parts = append(assistantResponse.Parts, tc)
}
messageHistory = append(messageHistory, assistantResponse)
// Process each tool call requested by the model.
for _, tc := range respChoice.ToolCalls {
toolName := tc.FunctionCall.Name
tool := toolsMap[toolName]
var args map[string]any
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
}
toolResult, err := tool.Invoke(ctx, args)
if err != nil {
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
}
if toolResult == "" || toolResult == nil {
toolResult = "Operation completed successfully with no specific return value."
}
// Create the tool call response message and add it to the history.
toolResponse := llms.MessageContent{
Role: llms.ChatMessageTypeTool,
Parts: []llms.ContentPart{
llms.ToolCallResponse{
Name: toolName,
Content: fmt.Sprintf("%v", toolResult),
},
},
}
messageHistory = append(messageHistory, toolResponse)
}
finalResp, err := llm.GenerateContent(ctx, messageHistory)
if err != nil {
log.Fatalf("Final LLM call failed after tool execution: %v", err)
}
// Add the final textual response from the LLM to the history
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
fmt.Println(finalResp.Choices[0].Content)
}
}
View File
@@ -1,38 +0,0 @@
module openai-quickstart
go 1.24.6
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
github.com/openai/openai-go v1.12.0
)
require (
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
)
View File
@@ -1,141 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
openai "github.com/openai/openai-go"
)
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
// Get the input schema
jsonSchemaBytes, err := toolboxTool.InputSchema()
if err != nil {
return openai.ChatCompletionToolParam{}
}
// Unmarshal the JSON bytes into FunctionParameters
var paramsSchema openai.FunctionParameters
if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
return openai.ChatCompletionToolParam{}
}
// Create and return the final tool parameter struct.
return openai.ChatCompletionToolParam{
Function: openai.FunctionDefinitionParam{
Name: toolboxTool.Name(),
Description: openai.String(toolboxTool.Description()),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
toolboxURL := "http://localhost:5000"
openAIClient := openai.NewClient()
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
openAITools[i] = ConvertToOpenAITool(tool)
// Add tool to a map for lookup later
toolsMap[tool.Name()] = tool
}
params := openai.ChatCompletionNewParams{
Messages: []openai.ChatCompletionMessageParamUnion{
openai.SystemMessage(systemPrompt),
},
Tools: openAITools,
Seed: openai.Int(0),
Model: openai.ChatModelGPT4o,
}
for _, query := range queries {
params.Messages = append(params.Messages, openai.UserMessage(query))
// Make initial chat completion request
completion, err := openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
toolCalls := completion.Choices[0].Message.ToolCalls
// Return early if there are no tool calls
if len(toolCalls) == 0 {
log.Println("No function call")
}
// If there was a function call, continue the conversation
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
for _, toolCall := range toolCalls {
toolName := toolCall.Function.Name
toolToInvoke := toolsMap[toolName]
var args map[string]any
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
if err != nil {
panic(err)
}
result, err := toolToInvoke.Invoke(ctx, args)
if err != nil {
log.Fatal("Could not invoke tool", err)
}
params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
}
completion, err = openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
params.Messages = append(params.Messages, openai.AssistantMessage(query))
fmt.Println("\n", completion.Choices[0].Message.Content)
}
}
View File
@@ -3,7 +3,7 @@ import { ToolboxClient } from "@toolbox-sdk/core";
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
const GOOGLE_API_KEY = 'enter your api here'; // Replace it with your API key
const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
View File
@@ -2,7 +2,8 @@ import { ToolboxClient } from "@toolbox-sdk/core";
import { genkit } from "genkit";
import { googleAI } from '@genkit-ai/googleai';
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
// Replace it with your API key
process.env.GOOGLE_API_KEY = 'your-api-key';
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
@@ -27,7 +28,7 @@ async function main() {
const ai = genkit({
plugins: [
googleAI({
apiKey: process.env.GEMINI_API_KEY || GOOGLE_API_KEY
apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
})
],
model: googleAI.model('gemini-2.0-flash'),
@@ -85,4 +86,4 @@ async function main() {
}
}
main();
main();
View File
@@ -4,7 +4,8 @@ import { tool } from "@langchain/core/tools";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { MemorySaver } from "@langchain/langgraph";
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
// Replace it with your API key
process.env.GOOGLE_API_KEY = 'your-api-key';
const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
View File
@@ -4,7 +4,7 @@ import { createMemory, staticBlock, tool } from "llamaindex";
import { ToolboxClient } from "@toolbox-sdk/core";
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
process.env.GOOGLE_API_KEY = 'your-api-key'; // Replace it with your API key
const prompt = `
@@ -40,7 +40,7 @@ async function main() {
// Initialize LLM
const llm = gemini({
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
apiKey: GOOGLE_API_KEY,
apiKey: process.env.GOOGLE_API_KEY,
});
const memory = createMemory({
View File
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
View File
@@ -48,19 +48,19 @@ to expose your developer assistant tools to a Looker instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
View File
@@ -37,19 +37,19 @@ description: "Connect your IDE to SQL Server using Toolbox."
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
View File
@@ -37,19 +37,19 @@ description: "Connect your IDE to MySQL using Toolbox."
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
View File
@@ -1,281 +0,0 @@
---
title: Neo4j using MCP
type: docs
weight: 2
description: "Connect your IDE to Neo4j using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Neo4j instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a Neo4j instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcp" : {
"servers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant to get the graph schema or execute Cypher statements.
The following tools are available to the LLM:
1. **get_schema**: extracts the complete database schema, including details about node labels, relationships, properties, constraints, and indexes.
1. **execute_cypher**: executes any arbitrary Cypher statement.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
View File
@@ -1,255 +0,0 @@
---
title: SQLite using MCP
type: docs
weight: 2
description: "Connect your IDE to SQLite using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQLite. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQLite instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a SQLite database file.](https://www.sqlite.org/download.html)
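If you do not already have a database, one quick way to create a small local file for testing is the `sqlite3` command-line shell (the table and rows below are purely illustrative):

```bash
# Create ./sample.db with a tiny illustrative table (requires the sqlite3 CLI)
sqlite3 sample.db "CREATE TABLE hotels(id INTEGER PRIMARY KEY, name TEXT, location TEXT, price_tier TEXT);"
sqlite3 sample.db "INSERT INTO hotels(name, location, price_tier) VALUES ('Hilton Basel', 'Basel', 'Luxury');"
```
The client configurations below assume the database file is at `./sample.db`.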
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "sqlite", "--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "sqlite", "--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "sqlite", "--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "sqlite", "--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"servers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","sqlite","--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","sqlite","--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","sqlite","--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlite": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","sqlite","--stdio"],
"env": {
"SQLITE_DATABASE": "./sample.db"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to SQLite using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -19,9 +19,10 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
* `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
* `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
* `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* `ALLOYDB_POSTGRES_USER`: The database username. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_PASSWORD`: The password for the database user. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_IP_TYPE`: The IP type i.e. "Public
or "Private" (Default: Public).
* **Permissions:**
* **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -50,14 +51,12 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `--prebuilt` value: `bigquery`
* **Environment Variables:**
* `BIGQUERY_PROJECT`: The GCP project ID.
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view metadata.
* **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view all datasets.
* **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or modify datasets and tables.
* **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the conversational analytics API.
* **Tools:**
* `analyze_contribution`: Use this tool to perform contribution analysis, also called key driver analysis.
* `ask_data_insights`: Use this tool to perform data analysis, get insights, or answer complex questions about the contents of specific BigQuery tables. For more information on required roles, API setup, and IAM configuration, see the setup and authentication section of the [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
* `execute_sql`: Executes a SQL statement.
* `forecast`: Use this tool to forecast time series data.
@@ -76,8 +75,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_MYSQL_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_MYSQL_USER`: The database username.
* `CLOUD_SQL_MYSQL_PASSWORD`: The password for the database user.
* `CLOUD_SQL_MYSQL_IP_TYPE`: The IP type i.e. "Public
or "Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -93,9 +90,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* `CLOUD_SQL_POSTGRES_USER`: The database username.
* `CLOUD_SQL_POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -114,7 +110,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
* `CLOUD_SQL_MSSQL_USER`: The database username.
* `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
* `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -140,7 +135,7 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `--prebuilt` value: `firestore`
* **Environment Variables:**
* `FIRESTORE_PROJECT`: The GCP project ID.
* `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to "(default)".
* `FIRESTORE_DATABASE`: The Firestore database ID.
* **Permissions:**
* **Cloud Datastore User** (`roles/datastore.user`) to get documents, list collections, and query collections.
* **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and validate Firestore rules.
@@ -233,7 +228,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `POSTGRES_DATABASE`: The name of the database to connect to.
* `POSTGRES_USER`: The database username.
* `POSTGRES_PASSWORD`: The password for the database user.
* `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db connection string.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
@@ -269,28 +263,3 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
* `list_tables`: Lists tables in the database.
## SQLite
* `--prebuilt` value: `sqlite`
* **Environment Variables:**
* `SQLITE_DATABASE`: The path to the SQLite database file (e.g., `./sample.db`).
* **Permissions:**
* File system read/write permissions for the specified database file.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
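As a quick sanity check outside an IDE, the prebuilt SQLite configuration can also be launched directly from a shell; this is a minimal sketch assuming the `toolbox` binary and a `./sample.db` file are in the current directory:

```bash
# Illustrative: serve the prebuilt SQLite tools over stdio
export SQLITE_DATABASE=./sample.db
./toolbox --prebuilt sqlite --stdio
```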
## Neo4j
* `--prebuilt` value: `neo4j`
* **Environment Variables:**
* `NEO4J_URI`: The URI of the Neo4j instance (e.g., `bolt://localhost:7687`).
* `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
* `NEO4J_USERNAME`: The username for the Neo4j instance.
* `NEO4J_PASSWORD`: The password for the Neo4j instance.
* **Permissions:**
* **Database-level permissions** are required to execute Cypher queries.
* **Tools:**
* `execute_cypher`: Executes a Cypher query.
* `get_schema`: Retrieves the schema of the Neo4j database.
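The Neo4j prebuilt configuration follows the same pattern; a minimal sketch, assuming a local Neo4j instance and the `toolbox` binary in the current directory (replace the placeholder connection details with your own):

```bash
# Illustrative: serve the prebuilt Neo4j tools over stdio
export NEO4J_URI="bolt://localhost:7687"
export NEO4J_DATABASE="neo4j"
export NEO4J_USERNAME="neo4j"
export NEO4J_PASSWORD="your-password"
./toolbox --prebuilt neo4j --stdio
```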
View File
@@ -28,7 +28,7 @@ The following configurations are placed at the top level of a `tools.yaml` file.
{{< notice tip >}}
If you are accessing Toolbox with multiple applications, each
application should register their own Client ID even if they use the same
"kind" of auth provider.
"type" of auth provider.
{{< /notice >}}
```yaml
@@ -300,4 +300,4 @@ func main() {
}
```
## Kinds of Auth Services
## Types of Auth Services
View File
@@ -55,5 +55,5 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------|
| kind | string | true | Must be "google". |
| type | string | true | Must be "google". |
| clientId | string | true | Client ID of your application from registering your application. |
View File
@@ -1,36 +0,0 @@
---
title: "AlloyDB Admin"
linkTitle: "AlloyDB Admin"
type: docs
weight: 2
description: >
The "alloydb-admin" source provides a client for the AlloyDB API.
aliases:
- /resources/sources/alloydb-admin
---
## About
The `alloydb-admin` source provides a client to interact with the [Google AlloyDB API](https://cloud.google.com/alloydb/docs/reference/rest). This allows tools to perform administrative tasks on AlloyDB resources, such as managing clusters, instances, and users.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
## Example
```yaml
sources:
my-alloydb-admin:
kind: alloydb-admin
my-oauth-alloydb-admin:
kind: alloydb-admin
useClientOAuth: true
```
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
View File
@@ -33,9 +33,6 @@ cluster][alloydb-free-trial].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in AlloyDB Postgres.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in an AlloyDB for PostgreSQL database.
### Pre-built Configurations
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
@@ -135,7 +132,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-postgres". |
| type | string | true | Must be "alloydb-postgres". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| cluster | string | true | Name of the AlloyDB cluster (e.g. "my-cluster"). |

View File

@@ -36,9 +36,6 @@ avoiding full table scans or complex filters.
## Available Tools
- [`bigquery-analyze-contribution`](../tools/bigquery/bigquery-analyze-contribution.md)
Performs contribution analysis, also called key driver analysis, in BigQuery.
- [`bigquery-conversational-analytics`](../tools/bigquery/bigquery-conversational-analytics.md)
Allows conversational interaction with a BigQuery source.
@@ -94,11 +91,15 @@ allows Toolbox to make queries to [BigQuery][bigquery-docs] on behalf of the
client or the end-user.
When using this on-behalf-of authentication, you must ensure that the
identity used has been granted the correct IAM permissions.
identity used has been granted the correct IAM permissions. Currently,
this option is only supported by the following BigQuery tools:
[iam-overview]: <https://cloud.google.com/bigquery/docs/access-control>
[adc]: <https://cloud.google.com/docs/authentication#adc>
[set-adc]: <https://cloud.google.com/docs/authentication/provide-credentials-adc>
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
[iam-overview]: https://cloud.google.com/bigquery/docs/access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
## Example
@@ -110,9 +111,6 @@ sources:
kind: "bigquery"
project: "my-project-id"
# location: "US" # Optional: Specifies the location for query jobs.
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
# - "my_dataset_1"
# - "other_project.my_dataset_2"
```
Initialize a BigQuery source that uses the client's access token:
@@ -124,17 +122,13 @@ sources:
project: "my-project-id"
useClientOAuth: true
# location: "US" # Optional: Specifies the location for query jobs.
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
# - "my_dataset_1"
# - "other_project.my_dataset_2"
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery". |
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
| allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| type | string | true | Must be "bigquery". |
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |

View File

@@ -70,6 +70,6 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
| kind | string | true | Must be "bigtable". |
| type | string | true | Must be "bigtable". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| instance | string | true | Name of the Bigtable instance. |

View File

@@ -81,7 +81,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------------------------------------|
| kind | string | true | Must be "clickhouse". |
| type | string | true | Must be "clickhouse". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). |

View File

@@ -1,36 +0,0 @@
---
title: "Cloud Monitoring"
type: docs
weight: 1
description: >
A "cloud-monitoring" source provides a client for the Cloud Monitoring API.
aliases:
- /resources/sources/cloud-monitoring
---
## About
The `cloud-monitoring` source provides a client to interact with the [Google Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows tools to access the Cloud Monitoring Metrics Explorer and run PromQL queries.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
## Example
```yaml
sources:
my-cloud-monitoring:
kind: cloud-monitoring
my-oauth-cloud-monitoring:
kind: cloud-monitoring
useClientOAuth: true
```
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-monitoring". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |

View File

@@ -1,36 +0,0 @@
---
title: "Cloud SQL Admin"
type: docs
weight: 1
description: >
A "cloud-sql-admin" source provides a client for the Cloud SQL Admin API.
aliases:
- /resources/sources/cloud-sql-admin
---
## About
The `cloud-sql-admin` source provides a client to interact with the [Google Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This allows tools to perform administrative tasks on Cloud SQL instances, such as creating users and databases.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
## Example
```yaml
sources:
my-cloud-sql-admin:
kind: cloud-sql-admin
my-oauth-cloud-sql-admin:
kind: cloud-sql-admin
useClientOAuth: true
```
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |

View File

@@ -106,7 +106,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-mssql". |
| type | string | true | Must be "cloud-sql-mssql". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |

View File

@@ -28,9 +28,6 @@ to a database by following these instructions][csql-mysql-quickstart].
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in Cloud SQL for MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a Cloud SQL for MySQL database.
### Pre-built Configurations
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
@@ -109,7 +106,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-mysql". |
| type | string | true | Must be "cloud-sql-mysql". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |

View File

@@ -28,9 +28,6 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in a PostgreSQL database.
### Pre-built Configurations
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
@@ -134,7 +131,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-postgres". |
| type | string | true | Must be "cloud-sql-postgres". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |

View File

@@ -37,7 +37,7 @@ For more details about alternate addresses and custom ports refer to [Managing C
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "couchbase". |
| type | string | true | Must be "couchbase". |
| connectionString | string | true | Connection string for the Couchbase cluster. |
| bucket | string | true | Name of the bucket to connect to. |
| scope | string | true | Name of the scope within the bucket. |

View File

@@ -320,5 +320,5 @@ Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or`
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex". |
| type | string | true | Must be "dataplex". |
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|

View File

@@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file.
| **Field** | **Type** | **Required** | **Description** |
|-------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dgraph". |
| type | string | true | Must be "dgraph". |
| dgraphUrl | string | true | Connection URI (e.g. "<https://xxx.cloud.dgraph.io>", "<https://localhost:8080>"). |
| user | string | false | Name of the Dgraph user to connect as (e.g., "groot"). |
| password | string | false | Password of the Dgraph user (e.g., "password"). |

View File

@@ -51,7 +51,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "firebird". |
| type | string | true | Must be "firebird". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "3050") |
| database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |

View File

@@ -72,6 +72,6 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore". |
| type | string | true | Must be "firestore". |
| project | string | true | Id of the GCP project that contains the Firestore database (e.g. "my-project-id"). |
| database | string | false | Name of the Firestore database to connect to. Defaults to "(default)" if not specified. |

View File

@@ -44,7 +44,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|------------------------|:-----------------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "http". |
| type | string | true | Must be "http". |
| baseUrl | string | true | The base URL for the HTTP requests (e.g., `https://api.example.com`). |
| timeout | string | false | The timeout for HTTP requests (e.g., "5s", "1m", refer to [ParseDuration][parse-duration-doc] for more examples). Defaults to 30s. |
| headers | map[string]string | false | Default headers to include in the HTTP requests. |
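A minimal sketch of how these fields fit together is shown below; the header name and token variable are illustrative assumptions, not part of the reference above.

```yaml
sources:
  my-http-source:
    # "kind" is renamed to "type" in this change; use whichever your version expects.
    kind: http
    baseUrl: https://api.example.com
    timeout: 10s                          # optional; defaults to 30s
    headers:                              # optional default headers (illustrative)
      Authorization: Bearer ${API_TOKEN}  # illustrative token variable
```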

View File

@@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
| -------------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
| kind | string | true | Must be "looker". |
| type | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server (with no trailing /). |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |

View File

@@ -28,5 +28,5 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------|
| kind | string | true | Must be "mongodb". |
| type | string | true | Must be "mongodb". |
| uri | string | true | Connection string to connect to MongoDB. |

View File

@@ -55,7 +55,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| type | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |

View File

@@ -22,9 +22,6 @@ reliability, performance, and ease of use.
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a MySQL database.
## Requirements
### Database User
@@ -60,7 +57,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "mysql". |
| type | string | true | Must be "mysql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3306"). |
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |

View File

@@ -51,7 +51,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------|
| kind | string | true | Must be "neo4j". |
| type | string | true | Must be "neo4j". |
| uri | string | true | Connect URI ("bolt://localhost", "neo4j+s://xxx.databases.neo4j.io") |
| user | string | true | Name of the Neo4j user to connect as (e.g. "neo4j"). |
| password | string | true | Password of the Neo4j user (e.g. "my-password"). |

View File

@@ -45,7 +45,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: |-------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oceanbase". |
| type | string | true | Must be "oceanbase". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "2881"). |
| database | string | true | Name of the OceanBase database to connect to (e.g. "my_db"). |

View File

@@ -23,9 +23,6 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in a PostgreSQL database.
### Pre-built Configurations
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
@@ -62,7 +59,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "postgres". |
| type | string | true | Must be "postgres". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "5432") |
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |

View File

@@ -91,7 +91,7 @@ sources:
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "memorystore-redis". |
| type | string | true | Must be "memorystore-redis". |
| address | string | true | Primary endpoint for the Memorystore Redis instance to connect to. |
| username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Redis, leave this field blank |
| password | string | false | If you have [Redis AUTH][auth] enabled, specify the AUTH string here |

View File

@@ -71,7 +71,7 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "spanner". |
| type | string | true | Must be "spanner". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| instance | string | true | Name of the Spanner instance. |
| database | string | true | Name of the database on the Spanner instance |

View File

@@ -26,14 +26,6 @@ SQLite has the following notable characteristics:
- [`sqlite-sql`](../tools/sqlite/sqlite-sql.md)
Run SQL queries against a local SQLite database.
- [`sqlite-execute-sql`](../tools/sqlite/sqlite-execute-sql.md)
Run parameterized SQL statements in SQLite.
### Pre-built Configurations
- [SQLite using MCP](../../how-to/connect-ide/sqlite_mcp.md)
Connect your IDE to SQLite using Toolbox.
## Requirements
@@ -69,7 +61,7 @@ sources:
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "sqlite". |
| type | string | true | Must be "sqlite". |
| database | string | true | Path to SQLite database file, or ":memory:" for an in-memory database. |
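A minimal sketch of a `sqlite` source using the field above; the file path is illustrative.

```yaml
sources:
  my-sqlite-source:
    # "kind" is renamed to "type" in this change; use whichever your version expects.
    kind: sqlite
    database: /path/to/app.db   # or ":memory:" for an in-memory database
```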
### Connection Properties

View File

@@ -72,7 +72,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------|
| kind | string | true | Must be "tidb". |
| type | string | true | Must be "tidb". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "gateway01.*.tidbcloud.com"). |
| port | string | true | Port to connect to (typically "4000" for TiDB). |
| database | string | true | Name of the TiDB database to connect to (e.g. "my_db"). |

View File

@@ -49,7 +49,7 @@ instead of hardcoding your secrets into the configuration file.
| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "trino". |
| type | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |

View File

@@ -65,7 +65,7 @@ sources:
| **field** | **type** | **required** | **description** |
|--------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "valkey". |
| type | string | true | Must be "valkey". |
| address | []string | true | Endpoints for the Valkey instance to connect to. |
| username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Valkey, leave this field blank |
| password | string | false | Password for the Valkey instance |

View File

@@ -1,44 +0,0 @@
---
title: "YugabyteDB"
type: docs
weight: 1
description: >
YugabyteDB is a high-performance, distributed SQL database.
---
## About
[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database designed for global, internet-scale applications, with full PostgreSQL compatibility.
[yugabytedb]: https://www.yugabyte.com/
## Example
```yaml
sources:
my-yb-source:
kind: yugabytedb
host: 127.0.0.1
port: 5433
database: yugabyte
user: ${USER_NAME}
password: ${PASSWORD}
loadBalance: true
topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------------------------------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "yugabytedb". |
| host | string | true | IP address to connect to. |
| port | integer | true | Port to connect to. The default port is 5433. |
| database | string | true | Name of the YugabyteDB database to connect to. The default database name is yugabyte. |
| user | string | true | Name of the YugabyteDB user to connect as. The default user is yugabyte. |
| password | string | true | Password of the YugabyteDB user. The default password is yugabyte. |
| loadBalance | boolean | false | If true, enable uniform load balancing. The default loadBalance value is false. |
| topologyKeys | string | false | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
| ybServersRefreshInterval | integer | false | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300. |
| fallbackToTopologyKeysOnly | boolean | false | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By default, this is set to false. |
| failedHostReconnectDelaySecs | integer | false | Time (in seconds) to wait before trying to connect to failed nodes. The default value is 5. |

View File

@@ -259,4 +259,4 @@ tools:
- other-auth-service
```
## Kinds of tools
## Types of tools

View File

@@ -1,7 +0,0 @@
---
title: "AlloyDB for PostgreSQL"
type: docs
weight: 1
description: >
Tools for AlloyDB for PostgreSQL.
---

View File

@@ -1,37 +0,0 @@
---
title: "alloydb-get-cluster"
type: docs
weight: 1
description: >
The "alloydb-get-cluster" tool retrieves details for a specific AlloyDB cluster.
aliases:
- /resources/tools/alloydb-get-cluster
---
## About
The `alloydb-get-cluster` tool retrieves detailed information for a single, specified AlloyDB cluster. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `projectId` | string | The GCP project ID to get cluster for. | Yes |
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). | Yes |
| `clusterId` | string | The ID of the cluster to retrieve. | Yes |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
get_specific_cluster:
kind: alloydb-get-cluster
source: my-alloydb-admin-source
description: Use this tool to retrieve details for a specific AlloyDB cluster.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-get-cluster". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -1,38 +0,0 @@
---
title: "alloydb-get-instance"
type: docs
weight: 1
description: >
The "alloydb-get-instance" tool retrieves details for a specific AlloyDB instance.
aliases:
- /resources/tools/alloydb-get-instance
---
## About
The `alloydb-get-instance` tool retrieves detailed information for a single, specified AlloyDB instance. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `project` | string | The GCP project ID to get instance for. | Yes |
| `location` | string | The location of the instance (e.g., 'us-central1'). | Yes |
| `cluster` | string | The ID of the cluster. | Yes |
| `instance` | string | The ID of the instance to retrieve. | Yes |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
get_specific_instance:
kind: alloydb-get-instance
source: my-alloydb-admin-source
description: Use this tool to retrieve details for a specific AlloyDB instance.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-get-instance". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -1,38 +0,0 @@
---
title: "alloydb-get-user"
type: docs
weight: 1
description: >
The "alloydb-get-user" tool retrieves details for a specific AlloyDB user.
aliases:
- /resources/tools/alloydb-get-user
---
## About
The `alloydb-get-user` tool retrieves detailed information for a single, specified AlloyDB user. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `project` | string | The GCP project ID to get user for. | Yes |
| `location` | string | The location of the cluster (e.g., 'us-central1'). | Yes |
| `cluster` | string | The ID of the cluster to retrieve the user from. | Yes |
| `user` | string | The ID of the user to retrieve. | Yes |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
get_specific_user:
kind: alloydb-get-user
source: my-alloydb-admin-source
description: Use this tool to retrieve details for a specific AlloyDB user.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-get-user". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -1,38 +0,0 @@
---
title: "alloydb-list-clusters"
type: docs
weight: 1
description: >
The "alloydb-list-clusters" tool lists the AlloyDB clusters in a given project and location.
aliases:
- /resources/tools/alloydb-list-clusters
---
## About
The `alloydb-list-clusters` tool retrieves AlloyDB cluster information for all or specified locations in a given project. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-list-clusters` tool lists detailed information about AlloyDB clusters (cluster name, state, configuration, etc.) for a given project and location. The tool takes the following input parameters:
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `projectId` | string | The GCP project ID to list clusters for. | Yes |
| `locationId` | string | The location to list clusters in (e.g., 'us-central1'). Use `-` for all locations. Default: `-`.| No |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
list_clusters:
kind: alloydb-list-clusters
source: alloydb-admin-source
description: Use this tool to list all AlloyDB clusters in a given project and location.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-list-clusters". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -1,39 +0,0 @@
---
title: "alloydb-list-instances"
type: docs
weight: 1
description: >
The "alloydb-list-instances" tool lists the AlloyDB instances for a given project, cluster and location.
aliases:
- /resources/tools/alloydb-list-instances
---
## About
The `alloydb-list-instances` tool retrieves AlloyDB instance information for all or specified clusters and locations in a given project. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-list-instances` tool lists detailed information about AlloyDB instances (instance name, type, IP address, state, configuration, etc.) for a given project, cluster, and location. The tool takes the following input parameters:
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `projectId` | string | The GCP project ID to list instances for. | Yes |
| `clusterId` | string | The ID of the cluster to list instances from. Use '-' to get results for all clusters. Default: `-`.| No |
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). Use '-' to get results for all locations. Default: `-`.| No |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
list_instances:
kind: alloydb-list-instances
source: alloydb-admin-source
description: Use this tool to list all AlloyDB instances for a given project, cluster and location.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-list-instances". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -1,38 +0,0 @@
---
title: "alloydb-list-users"
type: docs
weight: 1
description: >
The "alloydb-list-users" tool lists all database users within an AlloyDB cluster.
aliases:
- /resources/tools/alloydb-list-users
---
## About
The `alloydb-list-users` tool lists all database users within an AlloyDB cluster. It is compatible with the [alloydb-admin](../../sources/alloydb-admin.md) source.
The tool takes the following input parameters:
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------- | :------- |
| `projectId` | string | The GCP project ID to list users for. | Yes |
| `clusterId` | string | The ID of the cluster to list users from. | Yes |
| `locationId` | string | The location of the cluster (e.g., 'us-central1'). | Yes |
> **Note**
> This tool authenticates using the credentials configured in its [alloydb-admin](../../sources/alloydb-admin.md) source which can be either [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) or client-side OAuth.
## Example
```yaml
tools:
list_users:
kind: alloydb-list-users
source: alloydb-admin-source
description: Use this tool to list all database users within an AlloyDB cluster
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-list-users". |
| source | string | true | The name of an `alloydb-admin` source. |
| description | string | true | Description of the tool that is passed to the agent. |

View File

@@ -105,7 +105,7 @@ tools:
| **field** | **type** | **required** | **description** |
|--------------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-ai-nl". |
| type | string | true | Must be "alloydb-ai-nl". |
| source | string | true | Name of the AlloyDB source the natural language query should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| nlConfig | string | true | The name of the `nl_config` in AlloyDB |

View File

@@ -1,52 +0,0 @@
---
title: "bigquery-analyze-contribution"
type: docs
weight: 1
description: >
A "bigquery-analyze-contribution" tool performs contribution analysis in BigQuery.
aliases:
- /resources/tools/bigquery-analyze-contribution
---
## About
A `bigquery-analyze-contribution` tool performs contribution analysis in BigQuery by creating a temporary `CONTRIBUTION_ANALYSIS` model and then querying it with `ML.GET_INSIGHTS` to find top contributors for a given metric.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-analyze-contribution` takes the following parameters:
- **input_data** (string, required): The data containing the test and control data to analyze. This can be a fully qualified BigQuery table ID (e.g., `my-project.my_dataset.my_table`) or a SQL query that returns the data.
- **contribution_metric** (string, required): The name of the column that contains the metric to analyze. This can be SUM(metric_column_name), SUM(numerator_metric_column_name)/SUM(denominator_metric_column_name), or SUM(metric_sum_column_name)/COUNT(DISTINCT categorical_column_name), depending on the type of metric to analyze.
- **is_test_col** (string, required): The name of the column that identifies whether a row is in the test or control group. The column must contain boolean values.
- **dimension_id_cols** (array of strings, optional): An array of column names that uniquely identify each dimension.
- **top_k_insights_by_apriori_support** (integer, optional): The number of top insights to return, ranked by apriori support. Defaults to '30'.
- **pruning_method** (string, optional): The method to use for pruning redundant insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to `'PRUNE_REDUNDANT_INSIGHTS'`.
## Example
```yaml
tools:
contribution_analyzer:
kind: bigquery-analyze-contribution
source: my-bigquery-source
description: Use this tool to run contribution analysis on a dataset in BigQuery.
```
## Sample Prompt
You can prepare a sample table by following https://cloud.google.com/bigquery/docs/get-contribution-analysis-insights.
You can then use the following sample prompts to call this tool:
- What drives the changes in sales in the table `bqml_tutorial.iowa_liquor_sales_sum_data`? Use the project id myproject.
- Analyze the contribution for the `total_sales` metric in the table `bqml_tutorial.iowa_liquor_sales_sum_data`. The test group is identified by the `is_test` column. The dimensions are `store_name`, `city`, `vendor_name`, `category_name` and `item_description`.
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------------|
| kind | string | true | Must be "bigquery-analyze-contribution". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -48,7 +48,7 @@ tools:
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-conversational-analytics". |
| type | string | true | Must be "bigquery-conversational-analytics". |
| source | string | true | Name of the source for chat. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -33,6 +33,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-execute-sql". |
| type | string | true | Must be "bigquery-execute-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -44,6 +44,6 @@ You can use the following sample prompts to call this tool:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-forecast". |
| type | string | true | Must be "bigquery-forecast". |
| source | string | true | Name of the source the forecast tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -34,6 +34,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-get-dataset-info". |
| type | string | true | Must be "bigquery-get-dataset-info". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -34,6 +34,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-get-table-info". |
| type | string | true | Must be "bigquery-get-table-info". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -33,6 +33,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-list-dataset-ids". |
| type | string | true | Must be "bigquery-list-dataset-ids". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -15,19 +15,10 @@ It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-list-table-ids` accepts the following parameters:
- **`dataset`** (required): Specifies the dataset from which to list table IDs.
- **`project`** (optional): Defines the Google Cloud project ID. If not provided,
the tool defaults to the project from the source configuration.
The tool's behavior regarding these parameters is influenced by the
`allowedDatasets` restriction on the `bigquery` source:
- **Without `allowedDatasets` restriction:** The tool can list tables from any
dataset specified by the `dataset` and `project` parameters.
- **With `allowedDatasets` restriction:** Before listing tables, the tool verifies
that the requested dataset is in the allowed list. If it is not, the request is
denied. If only one dataset is specified in the `allowedDatasets` list, it
will be used as the default value for the `dataset` parameter.
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example
@@ -43,6 +34,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-list-table-ids". |
| type | string | true | Must be "bigquery-list-table-ids". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -97,7 +97,7 @@ tools:
| **field** | **type** | **required** | **description** |
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-sql". |
| type | string | true | Must be "bigquery-sql". |
| source | string | true | Name of the source the GoogleSQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The GoogleSQL statement to execute. |

View File

@@ -102,7 +102,7 @@ tools:
| **field** | **type** | **required** | **description** |
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigtable-sql". |
| type | string | true | Must be "bigtable-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute on. |

View File

@@ -41,6 +41,6 @@ tools:
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "clickhouse-execute-sql". |
| type | string | true | Must be "clickhouse-execute-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,53 +0,0 @@
---
title: "clickhouse-list-databases"
type: docs
weight: 3
description: >
A "clickhouse-list-databases" tool lists all databases in a ClickHouse instance.
aliases:
- /resources/tools/clickhouse-list-databases
---
## About
A `clickhouse-list-databases` tool lists all available databases in a
ClickHouse instance. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
This tool executes the `SHOW DATABASES` command and returns a list of all
databases accessible to the configured user, making it useful for database
discovery and exploration tasks.
## Example
```yaml
tools:
list_clickhouse_databases:
kind: clickhouse-list-databases
source: my-clickhouse-instance
description: List all available databases in the ClickHouse instance
```
## Return Value
The tool returns an array of objects, where each object contains:
- `name`: The name of the database
Example response:
```json
[
{"name": "default"},
{"name": "system"},
{"name": "analytics"},
{"name": "user_data"}
]
```
## Reference
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-list-databases". |
| source | string | true | Name of the ClickHouse source to list databases from. |
| description | string | true | Description of the tool that is passed to the LLM. |
| authRequired | array of string | false | Authentication services required to use this tool. |
| parameters | array of Parameter | false | Parameters for the tool (typically not used). |

View File

@@ -73,7 +73,7 @@ tools:
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-sql". |
| type | string | true | Must be "clickhouse-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The SQL statement template to execute. |

View File

@@ -1,7 +0,0 @@
---
title: "Cloud SQL"
type: docs
weight: 1
description: >
Tools that work with Cloud SQL.
---

View File

@@ -1,39 +0,0 @@
---
title: cloud-sql-create-database
type: docs
weight: 10
description: >
Create a new database in a Cloud SQL instance.
---
The `cloud-sql-create-database` tool creates a new database in a specified Cloud SQL instance.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}
## Example
```yaml
tools:
create-cloud-sql-database:
kind: cloud-sql-create-database
source: my-cloud-sql-admin-source
description: "Creates a new database in a Cloud SQL instance."
```
## Reference
| **field** | **type** | **required** | **description** |
| ----------- | :------: | :----------: | ------------------------------------------------ |
| kind | string | true | Must be "cloud-sql-create-database". |
| source | string | true | The name of the `cloud-sql-admin` source to use. |
| description | string | false | A description of the tool. |
## Input Parameters
| **parameter** | **type** | **required** | **description** |
| ------------- | :------: | :----------: | ------------------------------------------------------------------ |
| project | string | true | The project ID. |
| instance | string | true | The ID of the instance where the database will be created. |
| name | string | true | The name for the new database. Must be unique within the instance. |

View File

@@ -1,31 +0,0 @@
---
title: cloud-sql-create-users
type: docs
weight: 10
description: >
Create a new user in a Cloud SQL instance.
---
The `cloud-sql-create-users` tool creates a new user in a specified Cloud SQL instance. It can create both built-in and IAM users.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}
## Example
```yaml
tools:
create-cloud-sql-user:
kind: cloud-sql-create-users
source: my-cloud-sql-admin-source
description: "Creates a new user in a Cloud SQL instance. Both built-in and IAM users are supported. IAM users require an email account as the user name. IAM is the more secure and recommended way to manage users. The agent should always ask the user what type of user they want to create. For more information, see https://cloud.google.com/sql/docs/postgres/add-manage-iam-users"
```
## Reference
| **field** | **type** | **required** | **description** |
| ------------ | :-------: | :----------: | ------------------------------------------------ |
| kind | string | true | Must be "cloud-sql-create-users". |
| description | string | false | A description of the tool. |
| source | string | true | The name of the `cloud-sql-admin` source to use. |

Some files were not shown because too many files have changed in this diff.