Compare commits

2 Commits

Author · SHA1 · Message · Date
Prerna Kakkar · e2ec345e4a · Merge branch 'main' into envvariable · 2025-07-16 11:50:21 +00:00
Prerna Kakkar · 6f569bc950 · [feat]: Create tool to update mcp settings file for data plane with relevant env variables for alloydb · 2025-07-16 07:54:42 +00:00
326 changed files with 1280 additions and 31343 deletions

View File

@@ -62,6 +62,7 @@ steps:
postgressql \
postgresexecutesql
- id: "alloydb-pg"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -120,7 +121,8 @@ steps:
- "BIGTABLE_PROJECT=$PROJECT_ID"
- "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
secretEnv:
["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -152,26 +154,6 @@ steps:
bigquery \
bigquery
- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "DATAPLEX_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Dataplex" \
dataplex \
dataplex
- id: "postgres"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -272,7 +254,8 @@ steps:
- "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_MYSQL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
secretEnv:
["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -384,7 +367,7 @@ steps:
sqlite
- id: "couchbase"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -392,8 +375,7 @@ steps:
- "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
- "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
secretEnv: ["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -406,7 +388,7 @@ steps:
couchbase
- id: "redis"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -423,9 +405,9 @@ steps:
"Redis" \
redis \
redis
- id: "valkey"
name: golang:1
name : golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -443,94 +425,7 @@ steps:
"Valkey" \
valkey \
valkey
- id: "firestore"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIRESTORE_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Firestore" \
firestore \
firestore
- id: "looker"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIRESTORE_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
secretEnv:
[
"CLIENT_ID",
"LOOKER_BASE_URL",
"LOOKER_CLIENT_ID",
"LOOKER_CLIENT_SECRET",
]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Looker" \
looker \
looker
- id: "alloydbwaitforoperation"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "API_KEY=$(gcloud auth print-access-token)"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Alloydb Wait for Operation" \
utility \
utility/alloydbwaitforoperation
- id: "tidb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "TIDB_DATABASE=$_DATABASE_NAME"
- "TIDB_HOST=$_TIDB_HOST"
- "TIDB_PORT=$_TIDB_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "TIDB_USER", "TIDB_PASS"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"TiDB" \
tidb \
tidbsql tidbexecutesql
availableSecrets:
secretManager:
@@ -584,16 +479,7 @@ availableSecrets:
env: REDIS_PASS
- versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
env: VALKEY_ADDRESS
- versionName: projects/107716898620/secrets/looker_base_url/versions/latest
env: LOOKER_BASE_URL
- versionName: projects/107716898620/secrets/looker_client_id/versions/latest
env: LOOKER_CLIENT_ID
- versionName: projects/107716898620/secrets/looker_client_secret/versions/latest
env: LOOKER_CLIENT_SECRET
- versionName: projects/107716898620/secrets/tidb_user/versions/latest
env: TIDB_USER
- versionName: projects/107716898620/secrets/tidb_pass/versions/latest
env: TIDB_PASS
options:
logging: CLOUD_LOGGING_ONLY
@@ -624,7 +510,4 @@ substitutions:
_MSSQL_PORT: "1433"
_DGRAPHURL: "https://play.dgraph.io"
_COUCHBASE_BUCKET: "couchbase-bucket"
_COUCHBASE_SCOPE: "couchbase-scope"
_LOOKER_VERIFY_SSL: "true"
_TIDB_HOST: 127.0.0.1
_TIDB_PORT: "4000"
_COUCHBASE_SCOPE: "couchbase-scope"

View File

@@ -2,10 +2,8 @@
# Arguments:
# $1: Display name for logs (e.g., "Cloud SQL Postgres")
# $2: Integration test's package name (e.g., cloudsqlpg)
# $3, $4, ...: Tool package names for grep (e.g., postgressql), if the
# integration test specifically check a separate package inside a folder, please
# specify the full path instead (e.g., postgressql/postgresexecutesql)
# $2: Source package name (e.g., cloudsqlpg)
# $3, $4, ...: Tool package names for grep (e.g., postgressql)
DISPLAY_NAME="$1"
SOURCE_PACKAGE_NAME="$2"
@@ -59,4 +57,4 @@ if awk -v coverage="$coverage_numeric" 'BEGIN {exit !(coverage < 50)}'; then
exit 1
else
echo "Coverage for ${DISPLAY_NAME} is sufficient."
fi
fi
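
For reference, the Cloud Build hunks earlier in this diff show how this script is invoked; below is a minimal sketch of the calling convention and of the awk threshold gate the script ends with (the sample coverage value is illustrative):

```sh
# Calling convention: $1 display name, $2 source package, $3.. tool packages
.ci/test_with_coverage.sh "Couchbase" couchbase couchbase
.ci/test_with_coverage.sh "TiDB" tidb tidbsql tidbexecutesql

# The gate at the end of the script: awk exits 0 when coverage < 50,
# so the if-branch fires and the step fails.
coverage_numeric=42.0   # illustrative value
if awk -v coverage="$coverage_numeric" 'BEGIN {exit !(coverage < 50)}'; then
  echo "Coverage is below 50%." >&2
  exit 1
fi
```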

View File

@@ -1,7 +1,8 @@
assign_issues:
- kurtisvg
- Yuan325
- duwenxin99
- averikitsch
- akitsch
assign_issues_by:
- labels:
- 'product: bigquery'
@@ -10,6 +11,7 @@ assign_issues_by:
- shobsi
- jiaxunwu
assign_prs:
- kurtisvg
- Yuan325
- duwenxin99
- averikitsch
- akitsch

.github/labels.yaml (24 lines changed)
View File

@@ -50,7 +50,7 @@
color: ffffc7
description: Desirable enhancement or fix. May not be included in next release.
- name: 'do not merge'
- name: do not merge
color: d93f0b
description: Indicates a pull request not ready for merge, due to either quality
or timing.
@@ -65,26 +65,28 @@
color: ededed
description: Release please has completed a release for this.
- name: 'blunderbuss: assign'
color: 3DED97
description: Have blunderbuss assign this to someone new.
- name: 'tests: run'
color: 3DED97
description: Label to trigger Github Action tests.
- name: 'docs: deploy-preview'
color: BFDADC
description: Label to trigger Github Action docs preview.
- name: 'status: help wanted'
- name: 'status: contribution welcome'
color: 8befd7
description: 'Status: Unplanned work open to contributions from the community.'
- name: 'status: feedback wanted'
description: Status - Contributions welcome.
- name: 'status: awaiting response'
color: 8befd7
description: 'Status: waiting for feedback from community or issue author.'
description: Status - Awaiting response from author.
- name: 'status: awaiting codeowners'
color: 8befd7
description: Status - Awaiting response from code owners.
# Product Labels
- name: 'product: bigquery'
color: 5065c7
description: 'Product: Assigned to the BigQuery team.'
description: Product - Assigned to the BigQuery team.

View File

@@ -18,28 +18,19 @@ releaseType: simple
versionFile: "cmd/version.txt"
extraFiles: [
"README.md",
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/getting-started/introduction/_index.md",
"docs/en/getting-started/local_quickstart.md",
"docs/en/getting-started/local_quickstart_js.md",
"docs/en/getting-started/local_quickstart_go.md",
"docs/en/getting-started/mcp_quickstart/_index.md",
"docs/en/samples/alloydb/_index.md",
"docs/en/samples/bigquery/local_quickstart.md",
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
"docs/en/getting-started/colab_quickstart.ipynb",
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
"docs/en/samples/looker/looker_gemini.md",
"docs/en/samples/looker/looker_mcp_inspector.md",
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
"docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
"docs/en/how-to/connect-ide/bigquery_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/firestore_mcp.md",
"docs/en/how-to/connect-ide/looker_mcp.md",
"docs/en/how-to/connect-ide/mysql_mcp.md",
"docs/en/how-to/connect-ide/mssql_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/spanner_mcp.md",
"docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
"docs/en/how-to/connect-ide/postgres_mcp.md",
"docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
]

View File

@@ -55,7 +55,7 @@ jobs:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -51,8 +51,6 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
- name: Setup Hugo
@@ -67,7 +65,7 @@ jobs:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -26,4 +26,4 @@ jobs:
contents: 'read'
uses: ./.github/workflows/cloud_build_failure_reporter.yml
with:
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge,toolbox-continuous-release"
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge"

.gitignore (7 lines changed)
View File

@@ -4,9 +4,6 @@
# vscode
.vscode/
# idea
.idea/
# npm
node_modules
@@ -17,7 +14,3 @@ node_modules
# coverage
.coverage
# executable
genai-toolbox
toolbox

View File

@@ -1,63 +1,5 @@
# Changelog
## [0.11.0](https://github.com/googleapis/genai-toolbox/compare/v0.10.0...v0.11.0) (2025-08-05)
### ⚠ BREAKING CHANGES
* **tools/bigquery-sql:** Ensure invoke always returns a non-null value ([#1020](https://github.com/googleapis/genai-toolbox/issues/1020)) ([9af55b6](https://github.com/googleapis/genai-toolbox/commit/9af55b651d836f268eda342ea27380e7c9967c94))
* **tools/bigquery-execute-sql:** Update the return messages ([#1034](https://github.com/googleapis/genai-toolbox/issues/1034)) ([051e686](https://github.com/googleapis/genai-toolbox/commit/051e686476a781ca49f7617764d507916a1188b8))
### Features
* Add TiDB source and tool ([#829](https://github.com/googleapis/genai-toolbox/issues/829)) ([6eaf36a](https://github.com/googleapis/genai-toolbox/commit/6eaf36ac8505d523fa4f5a4ac3c97209fd688cef))
* Interactive web UI for Toolbox ([#1065](https://github.com/googleapis/genai-toolbox/issues/1065)) ([8749b03](https://github.com/googleapis/genai-toolbox/commit/8749b030035e65361047c4ead13dfacb8e9a9b59))
* **prebuiltconfigs/cloud-sql-postgres:** Introduce additional parameter to limit context in list tables ([#1062](https://github.com/googleapis/genai-toolbox/issues/1062)) ([c3a58e1](https://github.com/googleapis/genai-toolbox/commit/c3a58e1d1678dc14d8de5006511df597fd75faa3))
* **tools/looker-query-url:** Add support for `looker-query-url` tool ([#1015](https://github.com/googleapis/genai-toolbox/issues/1015)) ([327ddf0](https://github.com/googleapis/genai-toolbox/commit/327ddf0439058aa5ecd2c7ae8251fcde6aeff18c))
* **tools/dataplex-lookup-entry:** Add support for `dataplex-lookup-entry` tool ([#1009](https://github.com/googleapis/genai-toolbox/issues/1009)) ([5fa1660](https://github.com/googleapis/genai-toolbox/commit/5fa1660fc8631989b4d13abea205b6426bb506a5))
### Bug Fixes
* **tools/bigquery,mssql,mysql,postgres,spanner,tidb:** Add query logging to execute-sql tools ([#1069](https://github.com/googleapis/genai-toolbox/issues/1069)) ([0527532](https://github.com/googleapis/genai-toolbox/commit/0527532bd7085ef9eb8f9c30f430a2f2f35cef32))
## [0.10.0](https://github.com/googleapis/genai-toolbox/compare/v0.9.0...v0.10.0) (2025-07-25)
### Features
* Add `Map` parameters support ([#928](https://github.com/googleapis/genai-toolbox/issues/928)) ([4468bc9](https://github.com/googleapis/genai-toolbox/commit/4468bc920bbf27dce4ab160197587b7c12fcd20f))
* Add Dataplex source and tool ([#847](https://github.com/googleapis/genai-toolbox/issues/847)) ([30c16a5](https://github.com/googleapis/genai-toolbox/commit/30c16a559e8d49a9a717935269e69b97ec25519a))
* Add Looker source and tool ([#923](https://github.com/googleapis/genai-toolbox/issues/923)) ([c67e01b](https://github.com/googleapis/genai-toolbox/commit/c67e01bcf998e7b884be30ebb1fd277c89ed6ffc))
* Add support for null optional parameter ([#802](https://github.com/googleapis/genai-toolbox/issues/802)) ([a817b12](https://github.com/googleapis/genai-toolbox/commit/a817b120ca5e09ce80eb8d7544ebbe81fc28b082)), closes [#736](https://github.com/googleapis/genai-toolbox/issues/736)
* **prebuilt/alloydb-admin-config:** Add alloydb control plane as a prebuilt config ([#937](https://github.com/googleapis/genai-toolbox/issues/937)) ([0b28b72](https://github.com/googleapis/genai-toolbox/commit/0b28b72aa0ca2cdc87afbddbeb7f4dbb9688593d))
* **prebuilt/mysql,prebuilt/mssql:** Add generic mysql and mssql prebuilt tools ([#983](https://github.com/googleapis/genai-toolbox/issues/983)) ([c600c30](https://github.com/googleapis/genai-toolbox/commit/c600c30374443b6106c1f10b60cd334fd202789b))
* **server/mcp:** Support MCP version 2025-06-18 ([#898](https://github.com/googleapis/genai-toolbox/issues/898)) ([313d3ca](https://github.com/googleapis/genai-toolbox/commit/313d3ca0d084a3a6e7ac9a21a862aa31bf3edadd))
* **sources/mssql:** Add support for encrypt connection parameter ([#874](https://github.com/googleapis/genai-toolbox/issues/874)) ([14a868f](https://github.com/googleapis/genai-toolbox/commit/14a868f2a0780b94c2ca104419b2ff098778303b))
* **sources/firestore:** Add Firestore as Source ([#786](https://github.com/googleapis/genai-toolbox/issues/786)) ([2bb790e](https://github.com/googleapis/genai-toolbox/commit/2bb790e4f8194b677fe0ba40122d409d0e3e687e))
* **sources/mongodb:** Add MongoDB Source ([#969](https://github.com/googleapis/genai-toolbox/issues/969)) ([74dbd61](https://github.com/googleapis/genai-toolbox/commit/74dbd6124daab6192dd880dbd1d15f36861abf74))
* **tools/alloydb-wait-for-operation:** Add wait for operation tool with exponential backoff ([#920](https://github.com/googleapis/genai-toolbox/issues/920)) ([3f6ec29](https://github.com/googleapis/genai-toolbox/commit/3f6ec2944ede18ee02b10157cc048145bdaec87a))
* **tools/mongodb-aggregate:** Add MongoDB `aggregate` Tools ([#977](https://github.com/googleapis/genai-toolbox/issues/977)) ([bd399bb](https://github.com/googleapis/genai-toolbox/commit/bd399bb0fb7134469345ed9a1111ea4209440867))
* **tools/mongodb-delete:** Add MongoDB `delete` Tools ([#974](https://github.com/googleapis/genai-toolbox/issues/974)) ([78e9752](https://github.com/googleapis/genai-toolbox/commit/78e9752f620e065246f3e7b9d37062e492247c8a))
* **tools/mongodb-find:** Add MongoDB `find` Tools ([#970](https://github.com/googleapis/genai-toolbox/issues/970)) ([a747475](https://github.com/googleapis/genai-toolbox/commit/a7474752d8d7ea7af1e80a3c4533d2fd4154d897))
* **tools/mongodb-insert:** Add MongoDB `insert` Tools ([#975](https://github.com/googleapis/genai-toolbox/issues/975)) ([4c63f0c](https://github.com/googleapis/genai-toolbox/commit/4c63f0c1e402817a0c8fec611635e99290308d0e))
* **tools/mongodb-update:** Add MongoDB `update` Tools ([#972](https://github.com/googleapis/genai-toolbox/issues/972)) ([dfde52c](https://github.com/googleapis/genai-toolbox/commit/dfde52ca9a8e25e2f3944f52b4c2e307072b6c37))
* **tools/neo4j-execute-cypher:** Add neo4j-execute-cypher for Neo4j sources ([#946](https://github.com/googleapis/genai-toolbox/issues/946)) ([81d0505](https://github.com/googleapis/genai-toolbox/commit/81d05053b2e08338fd6eabe4849c309064f76b6b))
* **tools/neo4j-schema:** Add neo4j-schema tool ([#978](https://github.com/googleapis/genai-toolbox/issues/978)) ([be7db3d](https://github.com/googleapis/genai-toolbox/commit/be7db3dff263625ce64fdb726e81164996b7a708))
* **tools/wait:** Create wait for tool ([#885](https://github.com/googleapis/genai-toolbox/issues/885)) ([ed5ef4c](https://github.com/googleapis/genai-toolbox/commit/ed5ef4caea10ba1dbc49c0fc0a0d2b91cf341d3b))
### Bug Fixes
* Fix document preview pipeline for forked PRs ([#950](https://github.com/googleapis/genai-toolbox/issues/950)) ([481cc60](https://github.com/googleapis/genai-toolbox/commit/481cc608bae807d9e92497bc8863066916f7ef21))
* **prebuilt/firestore:** Mark database field as required in the firestore prebuilt tools ([#959](https://github.com/googleapis/genai-toolbox/issues/959)) ([15417d4](https://github.com/googleapis/genai-toolbox/commit/15417d4e0c7b173e81edbbeb672e53884d186104))
* **prebuilt/cloud-sql-mssql:** Correct source reference for execute_sql tool in cloud-sql-mssql.yaml prebuilt config ([#938](https://github.com/googleapis/genai-toolbox/issues/938)) ([d16728e](https://github.com/googleapis/genai-toolbox/commit/d16728e5c603eab37700876a6ddacbf709fd5823))
* **prebuilt/cloud-sql-mysql:** Update list_table tool ([#924](https://github.com/googleapis/genai-toolbox/issues/924)) ([2083ba5](https://github.com/googleapis/genai-toolbox/commit/2083ba50483951e9ee6101bb832aa68823cd96a5))
* Replace 'float' with 'number' in McpManifest ([#985](https://github.com/googleapis/genai-toolbox/issues/985)) ([59e23e1](https://github.com/googleapis/genai-toolbox/commit/59e23e17250a516e3931996114f32ac6526a4f8e))
* **server/api:** Add logger to context in tool invoke handler ([#891](https://github.com/googleapis/genai-toolbox/issues/891)) ([8ce311f](https://github.com/googleapis/genai-toolbox/commit/8ce311f256481e8f11ecb4aa505b95a562f394ef))
* **sources/looker:** Add agent tag to Looker API calls. ([#966](https://github.com/googleapis/genai-toolbox/issues/966)) ([f55dd6f](https://github.com/googleapis/genai-toolbox/commit/f55dd6fcd099f23bd89df62b268c4a53d16f3bac))
* **tools/bigquery-execute-sql:** Ensure invoke always returns a non-null value ([#925](https://github.com/googleapis/genai-toolbox/issues/925)) ([9a55b80](https://github.com/googleapis/genai-toolbox/commit/9a55b804821a6ccfcd157bcfaee7e599c4a5cb63))
* **tools/mysqlsql:** Unmarshal json data from database during invoke ([#979](https://github.com/googleapis/genai-toolbox/issues/979)) ([ccc3498](https://github.com/googleapis/genai-toolbox/commit/ccc3498cf0a4c43eb909e3850b9e6f582cd48f2a)), closes [#840](https://github.com/googleapis/genai-toolbox/issues/840)
## [0.9.0](https://github.com/googleapis/genai-toolbox/compare/v0.8.0...v0.9.0) (2025-07-11)

View File

@@ -81,7 +81,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
#### 2. Implement the New Tool
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgressql).
* **Create a new directory** under `internal/tools` for your tool type (e.g.,
`internal/tools/newdb` or `internal/tools/newdb<tool_name>`).
@@ -134,7 +134,7 @@ tools.
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
parameters][temp-param-doc]. Only run this test if template
parameters apply to your tool.
* **Add the new database to the test config** in
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).

View File

@@ -114,7 +114,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.11.0
export VERSION=0.9.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -127,23 +127,12 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.11.0
export VERSION=0.9.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
</details>
<details>
<summary>Homebrew</summary>
To install Toolbox using Homebrew on macOS or Linux:
```sh
brew install mcp-toolbox
```
</details>
<details>
<summary>Compile from source</summary>
@@ -151,7 +140,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.11.0
go install github.com/googleapis/genai-toolbox@v0.9.0
```
<!-- {x-release-please-end} -->
@@ -165,18 +154,8 @@ execute `toolbox` to start the server:
```sh
./toolbox --tools-file "tools.yaml"
```
> [!NOTE]
> Toolbox enables dynamic reloading by default. To disable, use the
> `--disable-reload` flag.
#### Homebrew Users
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:
```sh
toolbox --tools-file "tools.yaml"
```
> Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).
@@ -511,7 +490,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/invopop/jsonschema"
)
@@ -527,12 +505,30 @@ For more detailed instructions on using the Toolbox Core SDK, see the
// Framework agnostic tool
tool, err := client.LoadTool("toolName", ctx)
// Convert the tool using the tbgenkit package
// Use this tool with Genkit Go
genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
// Fetch the tool's input schema
inputschema, err := tool.InputSchema()
var schema *jsonschema.Schema
_ = json.Unmarshal(inputschema, &schema)
executeFn := func(ctx *ai.ToolContext, input any) (string, error) {
result, err := tool.Invoke(ctx, input.(map[string]any))
if err != nil {
// Propagate errors from the tool invocation.
return "", err
}
return result.(string), nil
}
// Use this tool with Genkit Go
genkitTool := genkit.DefineToolWithInputSchema(
g,
tool.Name(),
tool.Description(),
schema,
executeFn,
)
}
```

View File

@@ -51,53 +51,20 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/envvariable"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
@@ -110,12 +77,8 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
@@ -123,7 +86,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
)
@@ -222,10 +184,9 @@ func NewCommand(opts ...Option) *Command {
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres-admin', 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'dataplex', 'firestore', 'looker', 'mssql', 'mysql', 'postgres', 'spanner', 'spanner-postgres'.")
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'postgres', 'spanner', 'spanner-postgres'.")
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
// wrap RunE command so that we have access to original Command object
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
@@ -802,9 +763,6 @@ func run(cmd *Command) error {
return errMsg
}
cmd.logger.InfoContext(ctx, "Server ready to serve!")
if cmd.cfg.UI {
cmd.logger.InfoContext(ctx, "Toolbox UI is up and running at: http://localhost:5000/ui")
}
go func() {
defer close(srvErr)
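
A minimal sketch of how the flags registered in this hunk combine on the command line (values illustrative; only flag names visible in this diff are used):

```sh
# Serve a prebuilt tool configuration over HTTP (cannot be combined with --tools-file)
./toolbox --prebuilt "cloud-sql-postgres"

# Serve a tools file over MCP STDIO, with dynamic reloading disabled
./toolbox --tools-file "tools.yaml" --stdio --disable-reload
```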

View File

@@ -1161,17 +1161,11 @@ func TestSingleEdit(t *testing.T) {
}
func TestPrebuiltTools(t *testing.T) {
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
dataplex_config, _ := prebuiltconfigs.Get("dataplex")
firestoreconfig, _ := prebuiltconfigs.Get("firestore")
mysql_config, _ := prebuiltconfigs.Get("mysql")
mssql_config, _ := prebuiltconfigs.Get("mssql")
looker_config, _ := prebuiltconfigs.Get("looker")
postgresconfig, _ := prebuiltconfigs.Get("postgres")
spanner_config, _ := prebuiltconfigs.Get("spanner")
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
@@ -1184,16 +1178,6 @@ func TestPrebuiltTools(t *testing.T) {
in []byte
wantToolset server.ToolsetConfigs
}{
{
name: "alloydb postgres admin prebuilt tools",
in: alloydb_admin_config,
wantToolset: server.ToolsetConfigs{
"alloydb-postgres-admin-tools": tools.ToolsetConfig{
Name: "alloydb-postgres-admin-tools",
ToolNames: []string{"alloydb-create-cluster", "alloydb-operations-get", "alloydb-create-instance"},
},
},
},
{
name: "alloydb prebuilt tools",
in: alloydb_config,
@@ -1244,56 +1228,6 @@ func TestPrebuiltTools(t *testing.T) {
},
},
},
{
name: "dataplex prebuilt tools",
in: dataplex_config,
wantToolset: server.ToolsetConfigs{
"dataplex-tools": tools.ToolsetConfig{
Name: "dataplex-tools",
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry"},
},
},
},
{
name: "firestore prebuilt tools",
in: firestoreconfig,
wantToolset: server.ToolsetConfigs{
"firestore-database-tools": tools.ToolsetConfig{
Name: "firestore-database-tools",
ToolNames: []string{"firestore-get-documents", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
},
},
},
{
name: "mysql prebuilt tools",
in: mysql_config,
wantToolset: server.ToolsetConfigs{
"mysql-database-tools": tools.ToolsetConfig{
Name: "mysql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
},
{
name: "mssql prebuilt tools",
in: mssql_config,
wantToolset: server.ToolsetConfigs{
"mssql-database-tools": tools.ToolsetConfig{
Name: "mssql-database-tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
},
{
name: "looker prebuilt tools",
in: looker_config,
wantToolset: server.ToolsetConfigs{
"looker-tools": tools.ToolsetConfig{
Name: "looker-tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look"},
},
},
},
{
name: "postgres prebuilt tools",
in: postgresconfig,

View File

@@ -1 +1 @@
0.11.0
0.9.0

View File

@@ -4,12 +4,12 @@ type: docs
notoc: false
weight: 1
description: >
All of Toolbox's documentation.
All of Toolbox's documentation.
---
<html>
<head>
<link rel="canonical" href="getting-started/introduction/"/>
<meta http-equiv="refresh" content="0;url=getting-started/introduction/"/>
<meta http-equiv="refresh" content="0;url=getting-started/introduction"/>
</head>
</html>

View File

@@ -3,7 +3,7 @@ title: "Telemetry"
type: docs
weight: 2
description: >
An overview of telemetry and observability in Toolbox.
An overview of telemetry and observability in Toolbox.
---
## About
@@ -158,7 +158,7 @@ enabled:
- [Cloud Logging API](https://cloud.google.com/logging/docs/api/enable-api)
- [Cloud Monitoring API](https://cloud.google.com/monitoring/api/enable-api)
- [Cloud Trace API](https://console.cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
- [Cloud Trace API](https://cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
{{< /notice >}}
#### OTLP Exporter
@@ -177,7 +177,7 @@ It receives telemetry data, transforms it, and then exports data to backends
that can store it permanently. Toolbox provides an option to export telemetry
data to the user's choice of backend(s) that are compatible with the OpenTelemetry
Protocol (OTLP). If you would like to use a collector, please refer to this
[Export Telemetry using the Otel Collector](../../how-to/export_telemetry.md).
[Export Telemetry using the Otel Collector](../how-to/export_telemetry.md).
### Flags
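
A minimal sketch of the telemetry flags this section describes, based on the flag definitions in the cmd/root.go hunk earlier in this diff (the OTLP endpoint is illustrative):

```sh
# Export directly to Google Cloud Monitoring and Trace
./toolbox --tools-file "tools.yaml" --telemetry-gcp

# Export over OTLP to a collector, with a custom service.name
./toolbox --tools-file "tools.yaml" \
  --telemetry-otlp "http://127.0.0.1:4318" \
  --telemetry-service-name "toolbox-dev"
```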

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.11.0\" # x-release-please-version\n",
"version = \"0.9.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -1,7 +1,7 @@
---
title: "Configuration"
type: docs
weight: 6
weight: 4
description: >
How to configure Toolbox's tools.yaml file.
---

View File

@@ -86,7 +86,7 @@ To install Toolbox as a binary:
```sh
# see releases page for other versions
export VERSION=0.11.0
export VERSION=0.9.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,17 +97,10 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.11.0
export VERSION=0.9.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
{{% /tab %}}
{{% tab header="Homebrew" lang="en" %}}
To install Toolbox using Homebrew on macOS or Linux:
```sh
brew install mcp-toolbox
```
{{% /tab %}}
{{% tab header="Compile from source" lang="en" %}}
@@ -115,7 +108,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.11.0
go install github.com/googleapis/genai-toolbox@v0.9.0
```
{{% /tab %}}
@@ -130,34 +123,15 @@ execute `toolbox` to start the server:
```sh
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
#### Launching Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/use-toolbox-ui/index.md).
```sh
./toolbox --ui
```
#### Homebrew Users
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:
```sh
toolbox --tools-file "tools.yaml"
```
You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).
For more detailed documentation on deploying to different environments, check
out the resources in the [How-to section](../../how-to/)
out the resources in the [How-to section](../../how-to/_index.md)
### Integrating your application
@@ -165,7 +139,6 @@ Once your server is up and running, you can load the tools into your
application. See below the list of Client SDKs for using various frameworks:
#### Python
{{< tabpane text=true persist=header >}}
{{% tab header="Core" lang="en" %}}
@@ -178,7 +151,7 @@ from toolbox_core import ToolboxClient
# update the url to point to your server
async with ToolboxClient("http://127.0.0.1:5000") as client:
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
# these tools can be passed to your application!
tools = await client.load_toolset("toolset_name")
@@ -199,7 +172,7 @@ from toolbox_langchain import ToolboxClient
# update the url to point to your server
async with ToolboxClient("http://127.0.0.1:5000") as client:
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
# these tools can be passed to your application!
tools = client.load_toolset()
@@ -220,7 +193,7 @@ from toolbox_llamaindex import ToolboxClient
# update the url to point to your server
async with ToolboxClient("http://127.0.0.1:5000") as client:
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
# these tools can be passed to your application
@@ -330,7 +303,7 @@ const toolboxTools = await client.loadToolset('toolsetName');
const getTool = (toolboxTool) => tool({
name: toolboxTool.getName(),
description: toolboxTool.getDescription(),
parameters: toolboxTool.getParamSchema(),
parameters: toolboxTool.getParams(),
execute: toolboxTool
});
@@ -448,7 +421,6 @@ import (
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/invopop/jsonschema"
)
@@ -470,12 +442,33 @@ func main() {
log.Fatalf("Failed to load tools: %v", err)
}
// Convert the tool using the tbgenkit package
// Use this tool with Genkit Go
genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
// Fetch the tool's input schema
inputschema, err := tool.InputSchema()
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
log.Fatalf("Failed to fetch inputSchema: %v", err)
}
var schema *jsonschema.Schema
_ = json.Unmarshal(inputschema, &schema)
executeFn := func(ctx *ai.ToolContext, input any) (string, error) {
result, err := tool.Invoke(ctx, input.(map[string]any))
if err != nil {
// Propagate errors from the tool invocation.
return "", err
}
return result.(string), nil
}
// Use this tool with Genkit Go
genkitTool := genkit.DefineToolWithInputSchema(
g,
tool.Name(),
tool.Description(),
schema,
executeFn,
)
}
{{< /highlight >}}
@@ -592,6 +585,4 @@ func main() {
For more detailed instructions on using the Toolbox Go SDK, see the
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
For end-to-end samples on using the Toolbox Go SDK with orchestration
frameworks, see the [project's
samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
For end-to-end samples on using the Toolbox Go SDK with orchestration frameworks, see the [project's samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)

Binary file not shown (image replaced; size before: 76 KiB, after: 154 KiB).

View File

@@ -19,9 +19,7 @@ This guide assumes you have already done the following:
### Cloud Setup (Optional)
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
`vertexai=True` or a Google GenAI model), follow these one-time setup steps for
local development:
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using `vertexai=True` or a Google GenAI model), follow these one-time setup steps for local development:
1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
@@ -137,7 +135,7 @@ postgres` and a password next time.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
@@ -156,6 +154,7 @@ postgres` and a password next time.
\q
```
## Step 2: Install and configure Toolbox
In this section, we will download Toolbox, configure our tools in a
@@ -171,7 +170,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -272,10 +271,8 @@ In this section, we will download Toolbox, configure our tools in a
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
## Step 3: Connect your agent to Toolbox

View File

@@ -1,927 +0,0 @@
---
title: "Go Quickstart (Local)"
type: docs
weight: 4
description: >
How to get started running Toolbox locally with [Go](https://github.com/googleapis/mcp-toolbox-sdk-go), PostgreSQL, and orchestration frameworks such as [LangChain Go](https://tmc.github.io/langchaingo/docs/), [GenkitGo](https://genkit.dev/go/docs/get-started-go/), [Go GenAI](https://github.com/googleapis/go-genai) and [OpenAI Go](https://github.com/openai/openai-go).
---
## Before you begin
This guide assumes you have already done the following:
1. Installed [Go (v1.24.2 or higher)].
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
### Cloud Setup (Optional)
If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:
1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
1. Set your project and enable Vertex AI
```bash
gcloud config set project YOUR_PROJECT_ID
gcloud services enable aiplatform.googleapis.com
```
[Go (v1.24.2 or higher)]: https://go.dev/doc/install
[install-postgres]: https://www.postgresql.org/download/
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
## Step 1: Set up your database
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create a table using the following command:
```sql
CREATE TABLE hotels(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
location VARCHAR NOT NULL,
price_tier VARCHAR NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BIT NOT NULL
);
```
1. Insert data into the table.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
```
1. End the database session:
```bash
\q
```
## Step 2: Install and configure Toolbox
In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. Be sure to update any fields
such as `user`, `password`, or `database` that you may have customized in the
previous step.
{{< notice tip >}}
In practice, use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
search-hotels-by-location:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
book-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
update-hotel:
kind: postgres-sql
source: my-pg-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message
indicating whether the hotel was successfully updated or not.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to update.
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
statement: >-
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
as date) WHERE id = $1;
cancel-hotel:
kind: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: string
description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
For more info on tools, check out the `Resources` section of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}
## Step 3: Connect your agent to Toolbox
In this section, we will write and run an agent that will load the Tools
from Toolbox.
1. Initialize a go module:
```bash
go mod init main
```
1. In a new terminal, install the
[SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go).
```bash
go get github.com/googleapis/mcp-toolbox-sdk-go
```
1. Create a new file named `hotelagent.go` and copy the following code to create
an agent:
{{< tabpane persist=header >}}
{{< tab header="LangChain Go" lang="go" >}}
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/googleai"
)
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
// Fetch the tool's input schema
inputschema, err := toolboxTool.InputSchema()
if err != nil {
return llms.Tool{}
}
var paramsSchema map[string]any
_ = json.Unmarshal(inputschema, &paramsSchema)
// Convert into LangChain's llms.Tool
return llms.Tool{
Type: "function",
Function: &llms.FunctionDefinition{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
genaiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
ctx := context.Background()
// Initialize the Google AI client (LLM).
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
if err != nil {
log.Fatalf("Failed to create Google AI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
langchainTools := make([]llms.Tool, len(tools))
// Convert the loaded ToolboxTools into the format LangChainGo requires.
for i, tool := range tools {
langchainTools[i] = ConvertToLangchainTool(tool)
toolsMap[tool.Name()] = tool
}
// Start the conversation history.
messageHistory := []llms.MessageContent{
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
}
for _, query := range queries {
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
// Make the first call to the LLM, making it aware of the tool.
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
if err != nil {
log.Fatalf("LLM call failed: %v", err)
}
respChoice := resp.Choices[0]
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
for _, tc := range respChoice.ToolCalls {
assistantResponse.Parts = append(assistantResponse.Parts, tc)
}
messageHistory = append(messageHistory, assistantResponse)
// Process each tool call requested by the model.
for _, tc := range respChoice.ToolCalls {
toolName := tc.FunctionCall.Name
tool := toolsMap[toolName]
var args map[string]any
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
}
toolResult, err := tool.Invoke(ctx, args)
if err != nil {
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
}
if toolResult == "" || toolResult == nil {
toolResult = "Operation completed successfully with no specific return value."
}
// Create the tool call response message and add it to the history.
toolResponse := llms.MessageContent{
Role: llms.ChatMessageTypeTool,
Parts: []llms.ContentPart{
llms.ToolCallResponse{
Name: toolName,
Content: fmt.Sprintf("%v", toolResult),
},
},
}
messageHistory = append(messageHistory, toolResponse)
}
finalResp, err := llm.GenerateContent(ctx, messageHistory)
if err != nil {
log.Fatalf("Final LLM call failed after tool execution: %v", err)
}
// Add the final textual response from the LLM to the history
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
fmt.Println(finalResp.Choices[0].Content)
}
}
{{< /tab >}}
{{< tab header="Genkit Go" lang="go" >}}
package main
import (
"context"
"fmt"
"log"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/firebase/genkit/go/plugins/googlegenai"
)
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
ctx := context.Background()
// Create Toolbox Client
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
// Initialize Genkit
g, err := genkit.Init(ctx,
genkit.WithPlugins(&googlegenai.GoogleAI{}),
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
)
if err != nil {
log.Fatalf("Failed to init genkit: %v\n", err)
}
// Create a conversation history
conversationHistory := []*ai.Message{
ai.NewSystemTextMessage(systemPrompt),
}
// Convert your tool to a Genkit tool.
genkitTools := make([]ai.Tool, len(tools))
for i, tool := range tools {
newTool, err := tbgenkit.ToGenkitTool(tool, g)
if err != nil {
log.Fatalf("Failed to convert tool: %v\n", err)
}
genkitTools[i] = newTool
}
toolRefs := make([]ai.ToolRef, len(genkitTools))
for i, tool := range genkitTools {
toolRefs[i] = tool
}
for _, query := range queries {
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
response, err := genkit.Generate(ctx, g,
ai.WithMessages(conversationHistory...),
ai.WithTools(toolRefs...),
ai.WithReturnToolRequests(true),
)
if err != nil {
log.Fatalf("%v\n", err)
}
conversationHistory = append(conversationHistory, response.Message)
parts := []*ai.Part{}
for _, req := range response.ToolRequests() {
tool := genkit.LookupTool(g, req.Name)
if tool == nil {
log.Fatalf("tool %q not found", req.Name)
}
output, err := tool.RunRaw(ctx, req.Input)
if err != nil {
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
}
parts = append(parts,
ai.NewToolResponsePart(&ai.ToolResponse{
Name: req.Name,
Ref: req.Ref,
Output: output,
}))
}
if len(parts) > 0 {
resp, err := genkit.Generate(ctx, g,
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
ai.WithTools(toolRefs...),
)
if err != nil {
log.Fatal(err)
}
fmt.Println("\n", resp.Text())
conversationHistory = append(conversationHistory, resp.Message)
} else {
fmt.Println("\n", response.Text())
}
}
}
{{< /tab >}}
{{< tab header="Go GenAI" lang="go" >}}
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
"google.golang.org/genai"
)
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
inputSchema, err := toolboxTool.InputSchema()
if err != nil {
return &genai.Tool{}
}
var paramsSchema *genai.Schema
// Fall back to an empty tool if the schema cannot be parsed.
if err := json.Unmarshal(inputSchema, &paramsSchema); err != nil {
return &genai.Tool{}
}
// First, create the function declaration.
funcDeclaration := &genai.FunctionDeclaration{
Name: toolboxTool.Name(),
Description: toolboxTool.Description(),
Parameters: paramsSchema,
}
// Then, wrap the function declaration in a genai.Tool struct.
return &genai.Tool{
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
}
}
func printResponse(resp *genai.GenerateContentResponse) {
for _, cand := range resp.Candidates {
if cand.Content != nil {
for _, part := range cand.Content.Parts {
fmt.Println(part.Text)
}
}
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it.",
"Please book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
apiKey := os.Getenv("GOOGLE_API_KEY")
toolboxURL := "http://localhost:5000"
// Initialize the Google GenAI client using the explicit ClientConfig.
client, err := genai.NewClient(ctx, &genai.ClientConfig{
APIKey: apiKey,
})
if err != nil {
log.Fatalf("Failed to create Google GenAI client: %v", err)
}
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tool using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
genAITools := make([]*genai.Tool, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
genAITools[i] = ConvertToGenaiTool(tool)
toolsMap[tool.Name()] = tool
}
// Set up the generative model with the available tool.
modelName := "gemini-2.0-flash"
// Create the initial content prompt for the model.
messageHistory := []*genai.Content{
genai.NewContentFromText(systemPrompt, genai.RoleUser),
}
config := &genai.GenerateContentConfig{
Tools: genAITools,
ToolConfig: &genai.ToolConfig{
FunctionCallingConfig: &genai.FunctionCallingConfig{
Mode: genai.FunctionCallingConfigModeAny,
},
},
}
for _, query := range queries {
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
if err != nil {
log.Fatalf("LLM call failed for query '%s': %v", query, err)
}
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
}
functionCalls := genContentResp.FunctionCalls()
toolResponseParts := []*genai.Part{}
for _, fc := range functionCalls {
toolToInvoke, found := toolsMap[fc.Name]
if !found {
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
}
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
if invokeErr != nil {
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
}
// Enhanced Tool Result Handling (retained to prevent nil issues)
toolResultString := ""
if toolResult != nil {
jsonBytes, marshalErr := json.Marshal(toolResult)
if marshalErr == nil {
toolResultString = string(jsonBytes)
} else {
toolResultString = fmt.Sprintf("%v", toolResult)
}
}
responseMap := map[string]any{"result": toolResultString}
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
}
// Add all accumulated tool responses for this turn to the message history.
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
messageHistory = append(messageHistory, toolResponseContent)
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
if err != nil {
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
}
printResponse(finalResponse)
// Add the final textual response from the LLM to the history
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
}
}
}
{{< /tab >}}
{{< tab header="OpenAI Go" lang="go" >}}
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"github.com/googleapis/mcp-toolbox-sdk-go/core"
openai "github.com/openai/openai-go"
)
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
// Get the input schema
jsonSchemaBytes, err := toolboxTool.InputSchema()
if err != nil {
return openai.ChatCompletionToolParam{}
}
// Unmarshal the JSON bytes into FunctionParameters
var paramsSchema openai.FunctionParameters
if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
return openai.ChatCompletionToolParam{}
}
// Create and return the final tool parameter struct.
return openai.ChatCompletionToolParam{
Function: openai.FunctionDefinitionParam{
Name: toolboxTool.Name(),
Description: openai.String(toolboxTool.Description()),
Parameters: paramsSchema,
},
}
}
const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`
var queries = []string{
"Find hotels in Basel with Basel in its name.",
"Can you book the hotel Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
}
func main() {
// Setup
ctx := context.Background()
toolboxURL := "http://localhost:5000"
openAIClient := openai.NewClient()
// Initialize the MCP Toolbox client.
toolboxClient, err := core.NewToolboxClient(toolboxURL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
// Load the tools using the MCP Toolbox SDK.
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
if err != nil {
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
}
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
for i, tool := range tools {
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
openAITools[i] = ConvertToOpenAITool(tool)
// Add tool to a map for lookup later
toolsMap[tool.Name()] = tool
}
params := openai.ChatCompletionNewParams{
Messages: []openai.ChatCompletionMessageParamUnion{
openai.SystemMessage(systemPrompt),
},
Tools: openAITools,
Seed: openai.Int(0),
Model: openai.ChatModelGPT4o,
}
for _, query := range queries {
params.Messages = append(params.Messages, openai.UserMessage(query))
// Make initial chat completion request
completion, err := openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
toolCalls := completion.Choices[0].Message.ToolCalls
// If the model made no tool calls, log it; the conversation simply
// continues with the next query.
if len(toolCalls) == 0 {
log.Println("No function call")
}
// If there was a function call, continue the conversation.
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
for _, toolCall := range toolCalls {
toolName := toolCall.Function.Name
toolToInvoke := toolsMap[toolName]
var args map[string]any
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
if err != nil {
panic(err)
}
result, err := toolToInvoke.Invoke(ctx, args)
if err != nil {
log.Fatalf("Could not invoke tool '%s': %v", toolName, err)
}
// The tool result is typed as any; fall back to fmt formatting when it
// is not already a string.
resultStr, ok := result.(string)
if !ok {
resultStr = fmt.Sprintf("%v", result)
}
params.Messages = append(params.Messages, openai.ToolMessage(resultStr, toolCall.ID))
}
completion, err = openAIClient.Chat.Completions.New(ctx, params)
if err != nil {
panic(err)
}
// Record the assistant's final reply (not the user query) in the history.
params.Messages = append(params.Messages, openai.AssistantMessage(completion.Choices[0].Message.Content))
println("\n", completion.Choices[0].Message.Content)
}
}
{{< /tab >}}
{{< /tabpane >}}
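Most of the framework tabs above read credentials from environment variables rather than hardcoding them. Before running, export the key your chosen tab expects; the variable names below are a sketch (`GOOGLE_API_KEY` is read explicitly in the Go GenAI example, while `OPENAI_API_KEY` is the `openai-go` client's default):

```sh
export GOOGLE_API_KEY="your-api-key"   # Genkit Go and Go GenAI tabs
export OPENAI_API_KEY="your-api-key"   # OpenAI Go tab
```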
1. Ensure all dependencies are installed:
```sh
go mod tidy
```
1. Run your agent, and observe the results:
```sh
go run hotelagent.go
```
{{< notice info >}}
For more information, visit the [Go SDK
repo](https://github.com/googleapis/mcp-toolbox-sdk-go).
{{</ notice >}}
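If the agent misbehaves, it helps to separate Toolbox problems from LLM problems. Below is a minimal sketch that exercises the core SDK directly, with no model in the loop; it assumes the same server URL and toolset name as the examples above, and the `location` argument is a hypothetical placeholder for whatever parameters your first tool actually declares:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/googleapis/mcp-toolbox-sdk-go/core"
)

func main() {
	ctx := context.Background()

	// Same client and toolset as the agent examples above.
	client, err := core.NewToolboxClient("http://127.0.0.1:5000")
	if err != nil {
		log.Fatalf("Failed to create Toolbox client: %v", err)
	}
	tools, err := client.LoadToolset("my-toolset", ctx)
	if err != nil {
		log.Fatalf("Failed to load toolset: %v", err)
	}
	if len(tools) == 0 {
		log.Fatal("Toolset is empty; check your tools.yaml")
	}
	for _, tool := range tools {
		fmt.Println("Loaded tool:", tool.Name())
	}

	// Invoke the first tool directly. "location" is a hypothetical argument;
	// use the parameter names your tool declares in tools.yaml.
	result, err := tools[0].Invoke(ctx, map[string]any{"location": "Basel"})
	if err != nil {
		log.Fatalf("Invoke failed: %v", err)
	}
	fmt.Printf("Result: %v\n", result)
}
```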
View File
@@ -3,7 +3,7 @@ title: "JS Quickstart (Local)"
type: docs
weight: 3
description: >
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-js), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/), [GenkitJS](https://genkit.dev/docs/get-started/), and [LlamaIndex](https://ts.llamaindex.ai/).
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-python), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/) and [GenkitJS](https://genkit.dev/docs/get-started/).
---
## Before you begin
@@ -15,8 +15,7 @@ This guide assumes you have already done the following:
### Cloud Setup (Optional)
If you plan to use **Google Clouds Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:
If you plan to use **Google Clouds Vertex AI** with your agent (e.g., using Gemini or PaLM models), follow these one-time setup steps:
1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
@@ -30,8 +29,8 @@ Gemini or PaLM models), follow these one-time setup steps:
[Node.js (v18 or higher)]: https://nodejs.org/
[install-postgres]: https://www.postgresql.org/download/
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
[Set up Application Default Credentials (ADC)]: https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
## Step 1: Set up your database
@@ -133,7 +132,7 @@ postgres` and a password next time.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
@@ -167,7 +166,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -268,7 +267,6 @@ In this section, we will download Toolbox, configure our tools in a
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
@@ -287,21 +285,18 @@ from Toolbox.
1. In a new terminal, install the [SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
```bash
npm install @toolbox-sdk/core
npm install langchain @toolbox-sdk/core
```
1. Install other required dependencies
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="bash" >}}
npm install langchain @langchain/google-genai
npm install langchain @langchain/google-vertexai
{{< /tab >}}
{{< tab header="GenkitJS" lang="bash" >}}
npm install genkit @genkit-ai/vertexai
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
npm install llamaindex @llamaindex/google @llamaindex/workflow
{{< /tab >}}
{{< /tabpane >}}
1. Create a new file named `hotelAgent.js` and copy the following code to create an agent:
@@ -309,7 +304,7 @@ npm install llamaindex @llamaindex/google @llamaindex/workflow
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="js" >}}
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { ChatVertexAI } from "@langchain/google-vertexai";
import { ToolboxClient } from "@toolbox-sdk/core";
import { tool } from "@langchain/core/tools";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
@@ -336,10 +331,11 @@ const queries = [
];
async function runApplication() {
const model = new ChatGoogleGenerativeAI({
const model = new ChatVertexAI({
model: "gemini-2.0-flash",
});
const client = new ToolboxClient("http://127.0.0.1:5000");
const toolboxTools = await client.loadToolset("my-toolset");
@@ -364,6 +360,7 @@ async function runApplication() {
},
};
for (const query of queries) {
const agentOutput = await agent.invoke(
{
@@ -466,105 +463,20 @@ async function run() {
}
})
);
conversationHistory.push(...toolResponses);
// Call the AI again with the tool results.
response = await ai.generate({ messages: conversationHistory, tools });
conversationHistory.push(response.message);
}
console.log(response.text);
}
}
run();
{{< /tab >}}
{{< tab header="LlamaIndex" lang="js" >}}
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
import { agent } from "@llamaindex/workflow";
import { createMemory, staticBlock, tool } from "llamaindex";
import { ToolboxClient } from "@toolbox-sdk/core";
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
process.env.GOOGLE_API_KEY = 'your-api-key'; // Replace it with your API key
const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
When the user searches for a hotel, mention its name, id, location and price tier.
Always mention hotel ids while performing any searches — this is very important for operations.
For any bookings or cancellations, please provide the appropriate confirmation.
Update check-in or check-out dates if mentioned by the user.
Don't ask for confirmations from the user.
`;
const queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
];
async function main() {
// Connect to MCP Toolbox
const client = new ToolboxClient(TOOLBOX_URL);
const toolboxTools = await client.loadToolset("my-toolset");
const tools = toolboxTools.map((toolboxTool) => {
return tool({
name: toolboxTool.getName(),
description: toolboxTool.getDescription(),
parameters: toolboxTool.getParamSchema(),
execute: toolboxTool,
});
});
// Initialize LLM
const llm = gemini({
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
apiKey: process.env.GOOGLE_API_KEY,
});
const memory = createMemory({
memoryBlocks: [
staticBlock({
content: prompt,
}),
],
});
// Create the Agent
const myAgent = agent({
tools: tools,
llm,
memory,
systemPrompt: prompt,
});
for (const query of queries) {
const result = await myAgent.run(query);
const output = result.data.result;
console.log(`\nUser: ${query}`);
if (typeof output === "string") {
console.log(output.trim());
} else if (typeof output === "object" && "text" in output) {
console.log(output.text.trim());
} else {
console.log(JSON.stringify(output));
}
}
// You may observe some extra logs during execution due to the run method provided by LlamaIndex.
console.log("Agent run finished.");
}
main();
{{< /tab >}}
{{< /tabpane >}}
1. Run your agent, and observe the results:
@@ -575,4 +487,4 @@ main();
{{< notice info >}}
For more information, visit the [JS SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
{{</ notice >}}
{{</ notice >}}
View File
@@ -1,9 +1,9 @@
---
title: "Quickstart (MCP)"
type: docs
weight: 5
weight: 3
description: >
How to get started running Toolbox locally with MCP Inspector.
How to get started running Toolbox locally with MCP Inspector.
---
## Overview
@@ -71,7 +71,7 @@ access by our agent, and create a database user for Toolbox to connect with.
```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -200,7 +200,7 @@ In this section, we will download Toolbox, configure our tools in a
```
For more info on tools, check out the
[Tools](../../resources/tools/) section.
[Tools](../../resources/tools/_index.md) section.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
@@ -218,8 +218,7 @@ In this section, we will download Toolbox, configure our tools in a
1. Type `y` when it asks to install the inspector package.
1. It should show the following when the MCP Inspector is up and running (please
take note of `<YOUR_SESSION_TOKEN>`):
1. It should show the following when the MCP Inspector is up and running (please take note of <YOUR_SESSION_TOKEN>):
```bash
Starting MCP inspector...
@@ -233,12 +232,11 @@ In this section, we will download Toolbox, configure our tools in a
1. Open the above link in your browser.
1. For `Transport Type`, select `Streamable HTTP`.
1. For `Transport Type`, select `SSE`.
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse`.
1. For `Configuration` -> `Proxy Session Token`, make sure
`<YOUR_SESSION_TOKEN>` is present.
1. For `Authentication` -> `Proxy Session Token`, make sure <YOUR_SESSION_TOKEN> is present.
1. Click Connect.
Binary file not shown (image updated: 32 KiB → 22 KiB)

View File
@@ -1,14 +0,0 @@
---
title: "AlloyDB Admin API using MCP"
type: docs
weight: 2
description: >
Create your AlloyDB database with MCP Toolbox.
---
<html>
<head>
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
</head>
</html>
View File
@@ -7,7 +7,7 @@ description: >
---
<html>
<head>
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
</head>
</html>
View File
@@ -1,333 +0,0 @@
---
title: "Firestore using MCP"
type: docs
weight: 2
description: >
Connect your IDE to Firestore using Toolbox.
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Firestore. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a Firestore instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up Firestore
1. Create or select a Google Cloud project.
* [Create a new
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects)
* [Select an existing
project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects)
1. [Enable the Firestore
API](https://console.cloud.google.com/apis/library/firestore.googleapis.com)
for your project.
1. [Create a Firestore
database](https://cloud.google.com/firestore/docs/create-database-web-mobile-client-library)
if you haven't already.
1. Set up authentication for your local environment.
* [Install gcloud CLI](https://cloud.google.com/sdk/docs/install)
* Run `gcloud auth application-default login` to authenticate
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create
a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create
a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"firestore": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","firestore","--stdio"],
"env": {
"FIRESTORE_PROJECT": "your-project-id",
"FIRESTORE_DATABASE": "(default)"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
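Before wiring up an IDE, you can confirm the prebuilt server starts cleanly by launching it once from a terminal with the same settings (placeholder values). It should wait for an MCP client on stdin, so a clean start with no errors is the signal; stop it with Ctrl-C:

```bash
FIRESTORE_PROJECT="your-project-id" FIRESTORE_DATABASE="(default)" \
./toolbox --prebuilt firestore --stdio
```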
## Use Tools
Your AI tool is now connected to Firestore using MCP. Try asking your AI
assistant to list collections, get documents, query collections, or manage
security rules.
The following tools are available to the LLM:
1. **firestore-get-documents**: Gets multiple documents from Firestore by their
paths
1. **firestore-list-collections**: List Firestore collections for a given parent
path
1. **firestore-delete-documents**: Delete multiple documents from Firestore
1. **firestore-query-collection**: Query documents from a collection with
filtering, ordering, and limit options
1. **firestore-get-rules**: Retrieves the active Firestore security rules for
the current project
1. **firestore-validate-rules**: Validates Firestore security rules for syntax
and validation errors
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -1,276 +0,0 @@
---
title: "Looker using MCP"
type: docs
weight: 2
description: >
Connect your IDE to Looker using Toolbox.
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Postgres. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a Looker instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
## Set up Looker
1. Get a Looker Client ID and Client Secret. Follow the directions
[here](https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk).
1. Have the base URL of your Looker instance available. It is likely
something like `https://looker.example.com`. In some cases the API is
listening at a different port, and you will need to use
`https://looker.example.com:19999` instead.
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
1. Restart Claude Code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"looker-toolbox": {
"command": "./PATH/TO/toolbox",
"args": ["--stdio", "--prebuilt", "looker"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "",
"LOOKER_CLIENT_SECRET": "",
"LOOKER_VERIFY_SSL": "true"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
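As a quick smoke test of the credentials, the same prebuilt configuration can be launched directly from a terminal (placeholder values; stop with Ctrl-C once it starts without errors):

```bash
LOOKER_BASE_URL="https://looker.example.com" \
LOOKER_CLIENT_ID="your-client-id" \
LOOKER_CLIENT_SECRET="your-client-secret" \
LOOKER_VERIFY_SSL="true" \
./toolbox --stdio --prebuilt looker
```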
## Use Tools
Your AI tool is now connected to Looker using MCP. Try asking your AI
assistant to list models, explores, dimensions, and measures. Run a
query, retrieve the SQL for a query, and run a saved Look.
The following tools are available to the LLM:
1. **get_models**: list the LookML models in Looker
1. **get_explores**: list the explores in a given model
1. **get_dimensions**: list the dimensions in a given explore
1. **get_measures**: list the measures in a given explore
1. **get_filters**: list the filters in a given explore
1. **get_parameters**: list the parameters in a given explore
1. **query**: Run a query and return the data
1. **query_sql**: Return the SQL generated by Looker for a query
1. **query_url**: Return a link to the query in Looker for further exploration
1. **get_looks**: Return the saved Looks that match a title or description
1. **run_look**: Run a saved Look and return the data
1. **make_look**: Create a saved Look in Looker and return the URL
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -1,289 +0,0 @@
---
title: SQL Server using MCP
type: docs
weight: 2
description: "Connect your IDE to SQL Server using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQL Server. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQL Server instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a SQL Server instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcp" : {
"servers": {
"cloud-sql-sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
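To verify connectivity outside the IDE, the server can also be started by hand with the same settings (placeholder values; 1433 is SQL Server's default port):

```bash
MSSQL_HOST="localhost" MSSQL_PORT="1433" MSSQL_DATABASE="your-database" \
MSSQL_USER="your-user" MSSQL_PASSWORD="your-password" \
./toolbox --prebuilt mssql --stdio
```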
## Use Tools
Your AI tool is now connected to SQL Server using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -1,287 +0,0 @@
---
title: MySQL using MCP
type: docs
weight: 2
description: "Connect your IDE to MySQL using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a MySQL instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a MySQL instance.](https://dev.mysql.com/downloads/installer/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
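To verify connectivity before configuring an IDE, the server can be launched manually with the same settings (placeholder values; 3306 is MySQL's default port):

```bash
MYSQL_HOST="localhost" MYSQL_PORT="3306" MYSQL_DATABASE="your-database" \
MYSQL_USER="your-user" MYSQL_PASSWORD="your-password" \
./toolbox --prebuilt mysql --stdio
```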
## Use Tools
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
View File
@@ -52,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/linux/amd64/toolbox>
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/arm64/toolbox>
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/amd64/toolbox>
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/windows/amd64/toolbox>
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
View File
@@ -28,9 +28,8 @@ Toolbox currently supports the following versions of MCP specification:
The auth implementation in Toolbox is not supported in MCP's auth specification.
This includes:
* [Authenticated Parameters](../resources/tools/#authenticated-parameters)
* [Authorized Invocations](../resources/tools/#authorized-invocations)
* [Authenticated Parameters](../resources/tools/_index.md#authenticated-parameters)
* [Authorized Invocations](../resources/tools/_index.md#authorized-invocations)
## Connecting to Toolbox with an MCP client
@@ -40,7 +39,7 @@ This includes:
MCP is only compatible with Toolbox version 0.3.0 and above.
{{< /notice >}}
1. [Install](../getting-started/introduction/#installing-the-server)
1. [Install](../getting-started/introduction/_index.md#installing-the-server)
Toolbox version 0.3.0+.
1. Make sure you've set up and initialized your database.
@@ -63,15 +62,14 @@ remote HTTP server. Logs will be set to the `warn` level by default. `debug` and
`info` logs are not supported with stdio.
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
### Connecting via HTTP
Toolbox supports the HTTP transport protocol with and without SSE.
{{< tabpane text=true >}} {{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
{{< tabpane text=true >}} {{% tab header="HTTP with SSE" lang="en" %}}
Add the following configuration to your MCP client configuration:
```bash
@@ -132,8 +130,8 @@ testing and debugging Toolbox server.
1. Click the `Connect` button. It might take a while to spin up Toolbox. Voila!
You should be able to inspect your toolbox tools!
{{% /tab %}}
{{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
1. [Run Toolbox](../getting-started/introduction/#running-the-server).
{{% tab header="HTTP with SSE" lang="en" %}}
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
1. In a separate terminal, run Inspector directly through `npx`:
@@ -150,7 +148,7 @@ testing and debugging Toolbox server.
tools!
{{% /tab %}}
{{% tab header="Streamable HTTP" lang="en" %}}
1. [Run Toolbox](../getting-started/introduction/#running-the-server).
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
1. In a separate terminal, run Inspector directly through `npx`:

View File

@@ -79,7 +79,7 @@ database are in the same VPC network.
Create a `tools.yaml` file that contains your configuration for Toolbox. For
details, see the
[configuration](../resources/sources/)
[configuration](https://googleapis.github.io/genai-toolbox/resources/sources/)
section.
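As an illustration, a minimal `tools.yaml` for a Postgres-backed tool might look like the sketch below. The source name, connection fields, and the `search-hotels` tool are assumptions for this example; adapt them to your own database.
```yaml
sources:
  my-pg-source:                # assumed name for this sketch
    kind: cloud-sql-postgres
    project: my-project-id
    region: us-central1
    instance: my-instance
    database: my_db
    user: ${USER_NAME}
    password: ${PASSWORD}
tools:
  search-hotels:               # assumed example tool
    kind: postgres-sql
    source: my-pg-source
    description: Search for hotels by name.
    parameters:
      - name: name
        type: string
        description: Name of the hotel to search for.
    statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```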
## Deploy to Cloud Run
@@ -125,7 +125,7 @@ section.
--region us-central1 \
--set-secrets "/app/tools.yaml=tools:latest" \
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080" \
# TODO(dev): update the following to match your VPC if necessary
# TODO(dev): update the following to match your VPC if necessary
--network default \
--subnet default
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
@@ -133,16 +133,22 @@ section.
## Connecting with Toolbox Client SDK
You can connect to Toolbox Cloud Run instances directly through the SDK.
You can connect to Toolbox Cloud Run instances directly through the SDK
1. [Set up `Cloud Run Invoker` role
access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals)
to your Cloud Run service.
1. (Only for local runs) Set up [Application Default
1. Set up [Application Default
Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
for the principal you granted the `Cloud Run Invoker` role access to.
{{< notice tip >}}
If you're working in an environment other than local, set up [environment-specific Application Default
Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc).
{{< /notice >}}
1. Run the following to retrieve a non-deterministic URL for the Cloud Run service:
```bash
@@ -154,11 +160,9 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
```python
from toolbox_core import ToolboxClient, auth_methods
auth_token_provider = auth_methods.aget_google_id_token # can also use sync method
# Replace with the Cloud Run service URL generated in the previous step.
URL = "https://cloud-run-url.app"
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
async with ToolboxClient(
URL,
client_headers={"Authorization": auth_token_provider},

Binary file not shown (image removed; was 36 MiB)
Binary file not shown (image removed; was 298 KiB)
View File

@@ -1,106 +0,0 @@
---
title: "Toolbox UI"
type: docs
weight: 1
description: >
How to effectively use Toolbox UI.
---
Toolbox UI is a built-in web interface that allows users to visually inspect and test out configured resources such as tools and toolsets.
## Launching Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag.
```sh
./toolbox --ui
```
Toolbox UI will be served from the same host and port as the Toolbox Server, with the `/ui` suffix. Once Toolbox
is launched, an INFO log with Toolbox UI's URL will be shown:
```bash
INFO "Toolbox UI is up and running at: http://localhost:5000/ui"
```
## Navigating the Tools Page
The tools page shows all tools loaded from your configuration file. This corresponds to the default toolset (represented by an empty string). Each tool's name on this page will exactly match its name in the configuration
file.
To view details for a specific tool, click on the tool name. The main content area will be populated
with the tool name, description, and available parameters.
![Tools Page](./tools.png)
### Invoking a Tool
1. Click on a Tool
2. Enter appropriate parameters in each parameter field
3. Click "Run Tool"
4. Done! Your results will appear in the response field
5. (Optional) Uncheck "Prettify JSON" to format the response as plain text
![Run Tool Demo GIF](./run-tool.gif)
### Optional Parameters
Toolbox allows users to add [optional parameters](../../resources/tools/#basic-parameters) with or without a default value.
To exclude a parameter, uncheck the box to the right of that parameter, and it will not be
included in the request body. If the parameter is not sent, Toolbox will either treat it as a `nil` value or use the `default` value, if one is configured. If the parameter is required, Toolbox will throw an error.
When the box is checked, the parameter will be sent exactly as entered in the field (e.g. an empty string).
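For reference, a parameter becomes optional in the tools configuration once it carries a `default`; a minimal sketch (the parameter name here is illustrative):
```yaml
parameters:
  - name: city                 # illustrative optional parameter
    type: string
    description: City to filter results on.
    default: "Paris"           # having a default makes the parameter optional
```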
![Optional Parameter checked example](./optional-param-checked.png)
![Optional Parameter unchecked example](./optional-param-unchecked.png)
### Editing Headers
To edit headers, press the "Edit Headers" button to display the header modal. Within this modal,
users can make direct edits by typing into the header's text area.
Toolbox UI validates that the headers are in correct JSON format. Other header-related errors (e.g.,
incorrect header names or values required by the tool) will be reported in the Response section
after running the tool.
![Edit Headers](./edit-headers.png)
#### Google OAuth
Currently, Toolbox supports Google OAuth 2.0 as an AuthService, which allows tools to utilize
authorized parameters. When a tool uses an authorized parameter, the parameter will be displayed
but not editable, as it will be populated from the authentication token.
To provide the token, add your Google OAuth ID Token to the request header using the "Edit Headers"
button and modal described above. The key should be the name of your AuthService as defined in
your tool configuration file, suffixed with `_token`. The value should be your ID token as a string.
1. Select a tool that requires [authenticated parameters](../../resources/tools/#authenticated-parameters)
2. The auth parameter's text field is greyed out because it cannot be entered manually; it will
be parsed from the resolved auth token
3. To update request headers with the token, select "Edit Headers"
4. Check out the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving an ID token
5. Paste the token into the request header
6. Click "Save"
7. Click "Run Tool"
```json
{
"Content-Type": "application/json",
"my-google-auth_token": "YOUR_ID_TOKEN_HERE"
}
```
![Using Authenticated Parameter GIF](./edit-headers.gif)
## Navigating the Toolsets Page
Through the toolsets page, users can search for a specific toolset to retrieve tools from. Simply
enter the toolset name in the search bar, and press "Enter" to retrieve the associated tools.
If the toolset name is not defined within the tools configuration file, an error message will be
displayed.
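Toolset names come from the `toolsets` section of the tools configuration file; a minimal sketch (the tool names are illustrative):
```yaml
toolsets:
  my_toolset:                  # the name to enter in the search bar
    - search-hotels
    - list-tables
```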
![Toolsets Page](./toolsets.png)

Binary file not shown (image removed; was 58 KiB)
Binary file not shown (image removed; was 59 KiB)
Binary file not shown (image removed; was 5.4 MiB)
Binary file not shown (image removed; was 269 KiB)
Binary file not shown (image removed; was 136 KiB)
View File

@@ -3,11 +3,11 @@ title: "AuthServices"
type: docs
weight: 1
description: >
AuthServices represent services that handle authentication and authorization.
AuthServices represent services that handle authentication and authorization.
---
AuthServices represent services that handle authentication and authorization. They
can primarily be used by [Tools](../tools/) in two different ways:
can primarily be used by [Tools](../tools) in two different ways:
- [**Authorized Invocation**][auth-invoke] is when a tool
is validated by the auth service before the call can be invoked. Toolbox

View File

@@ -3,7 +3,7 @@ title: "Google Sign-In"
type: docs
weight: 1
description: >
Use Google Sign-In for OAuth 2.0 flow and token lifecycle.
Use Google Sign-In for OAuth 2.0 flow and token lifecycle.
---
## Getting Started

View File

@@ -22,25 +22,6 @@ cluster][alloydb-free-trial].
[alloydb-docs]: https://cloud.google.com/alloydb/docs
[alloydb-free-trial]: https://cloud.google.com/alloydb/docs/create-free-trial-cluster
## Available Tools
- [`alloydb-ai-nl`](../tools/alloydbainl/alloydb-ai-nl.md)
Use natural language queries on AlloyDB, powered by AlloyDB AI.
- [`postgres-sql`](../tools/postgres/postgres-sql.md)
Execute SQL queries as prepared statements in AlloyDB Postgres.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in AlloyDB Postgres.
### Pre-built Configurations
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
Connect your IDE to AlloyDB using Toolbox.
- [AlloyDB Admin API using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_admin_mcp/)
Create your AlloyDB database with MCP Toolbox.
## Requirements
### IAM Permissions

View File

@@ -34,31 +34,6 @@ avoiding full table scans or complex filters.
[bigquery-quickstart-cli]: https://cloud.google.com/bigquery/docs/quickstarts/quickstart-command-line
[bigquery-googlesql]: https://cloud.google.com/bigquery/docs/reference/standard-sql/
## Available Tools
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
- [`bigquery-execute-sql`](../tools/bigquery/bigquery-execute-sql.md)
Execute structured queries using parameters.
- [`bigquery-get-dataset-info`](../tools/bigquery/bigquery-get-dataset-info.md)
Retrieve metadata for a specific dataset.
- [`bigquery-get-table-info`](../tools/bigquery/bigquery-get-table-info.md)
Retrieve metadata for a specific table.
- [`bigquery-list-dataset-ids`](../tools/bigquery/bigquery-list-dataset-ids.md)
List available dataset IDs.
- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)
List tables in a given dataset.
### Pre-built Configurations
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
Connect your IDE to BigQuery using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -32,11 +32,6 @@ such as avoiding full table scans or complex filters.
[bigtable-googlesql]:
https://cloud.google.com/bigtable/docs/googlesql-overview
## Available Tools
- [`bigtable-sql`](../tools/bigtable/bigtable-sql.md)
Run SQL-like queries over Bigtable rows.
## Requirements
### IAM Permissions

View File

@@ -19,19 +19,6 @@ to a database by following these instructions][csql-mssql-connect].
[csql-mssql-docs]: https://cloud.google.com/sql/docs/sqlserver
[csql-mssql-connect]: https://cloud.google.com/sql/docs/sqlserver/connect-overview
## Available Tools
- [`mssql-sql`](../tools/mssql/mssql-sql.md)
Execute pre-defined SQL Server queries with placeholder parameters.
- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)
Run parameterized SQL Server queries in Cloud SQL for SQL Server.
### Pre-built Configurations
- [Cloud SQL for SQL Server using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mssql_mcp/)
Connect your IDE to Cloud SQL for SQL Server using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -20,19 +20,6 @@ to a database by following these instructions][csql-mysql-quickstart].
[csql-mysql-docs]: https://cloud.google.com/sql/docs/mysql
[csql-mysql-quickstart]: https://cloud.google.com/sql/docs/mysql/connect-instance-local-computer
## Available Tools
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
Execute pre-defined prepared SQL queries in MySQL.
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in Cloud SQL for MySQL.
### Pre-built Configurations
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
Connect your IDE to Cloud SQL for MySQL using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -20,20 +20,6 @@ to a database by following these instructions][csql-pg-quickstart].
[csql-pg-docs]: https://cloud.google.com/sql/docs/postgres
[csql-pg-quickstart]: https://cloud.google.com/sql/docs/postgres/connect-instance-local-computer
## Available Tools
- [`postgres-sql`](../tools/postgres/postgres-sql.md)
Execute SQL queries as prepared statements in PostgreSQL.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
### Pre-built Configurations
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
Connect your IDE to Cloud SQL for Postgres using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -11,11 +11,6 @@ description: >
A `couchbase` source establishes a connection to a Couchbase database cluster,
allowing tools to execute SQL queries against it.
## Available Tools
- [`couchbase-sql`](../tools/couchbase/couchbase-sql.md)
Run SQL++ statements on Couchbase with parameterized input.
## Example
```yaml

View File

@@ -1,301 +0,0 @@
---
title: "Dataplex"
type: docs
weight: 1
description: >
Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
---
# Dataplex Source
[Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
powers AI, analytics, and business intelligence at scale.
At the heart of these governance capabilities is a catalog that contains a
centralized inventory of the data assets in your organization. Dataplex
Universal Catalog holds business, technical, and runtime metadata for all of
your data. It helps you discover relationships and semantics in the metadata by
applying artificial intelligence and machine learning.
[dataplex-docs]: https://cloud.google.com/dataplex/docs
## Example
```yaml
sources:
my-dataplex-source:
kind: "dataplex"
project: "my-project-id"
```
## Sample System Prompt
You can use the following system prompt as "Custom Instructions" in your client
application.
```
# Objective
Your primary objective is to help discover, organize and manage metadata related to data assets.
# Tone and Style
1. Adopt the persona of a senior subject matter expert
2. Your communication style must be:
1. Concise: Always favor brevity.
2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.
Example (Incorrect): Hi there! I see that you are looking for...
Example (Correct): This problem likely stems from...
3. Do not reiterate or summarize the question in the answer.
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
Example (Correct): "The entry describes..."
Example (Correct): "According to catalog,..."
Example (Correct): "Based on the metadata,..."
Example (Correct): "Based on the search results,..."
5. Do not make assumptions
# Data Model
## Entries
Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:
- A BigQuery table or dataset
- A Cloud Storage bucket or folder
- An on-premises SQL table
## Aspects
While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.
## Aspect Types
Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry.
Examples:
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
- projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
- projects/dataplex-types/locations/global/aspectTypes/bigquery-model
- projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
- projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-table
- projects/dataplex-types/locations/global/aspectTypes/bigquery-view
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
- projects/dataplex-types/locations/global/aspectTypes/contacts
- projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
- projects/dataplex-types/locations/global/aspectTypes/dataform-repository
- projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/aspectTypes/data-product
- projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
- projects/dataplex-types/locations/global/aspectTypes/external-connection
- projects/dataplex-types/locations/global/aspectTypes/overview
- projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
- projects/dataplex-types/locations/global/aspectTypes/schema
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
- projects/dataplex-types/locations/global/aspectTypes/sql-access
- projects/dataplex-types/locations/global/aspectTypes/storage-bucket
- projects/dataplex-types/locations/global/aspectTypes/storage-folder
- projects/dataplex-types/locations/global/aspectTypes/storage
- projects/dataplex-types/locations/global/aspectTypes/usage
## Entry Types
Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type.
Examples:
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/entryTypes/bigquery-connection
- projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
- projects/dataplex-types/locations/global/entryTypes/bigquery-model
- projects/dataplex-types/locations/global/entryTypes/bigquery-routine
- projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-table
- projects/dataplex-types/locations/global/entryTypes/bigquery-view
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
- projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
- projects/dataplex-types/locations/global/entryTypes/dataform-repository
- projects/dataplex-types/locations/global/entryTypes/dataform-workspace
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/entryTypes/pubsub-topic
- projects/dataplex-types/locations/global/entryTypes/storage-bucket
- projects/dataplex-types/locations/global/entryTypes/storage-folder
- projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store
## Entry Groups
Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.
## Entry Links
Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).
# Tool instructions
## Tool: dataplex_search_entries
## General
- Do not try to search within search results on your own.
- Do not fetch multiple pages of results unless explicitly asked.
## Search syntax
### Simple search
In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:
- A substring of a name, display name, or description of a resource
- A substring of the type of a resource
- A substring of a column name (or nested column name) in the schema of a resource
- A substring of a project ID
- A string from an overview description
For example, the predicate foo matches the following resources:
- Resource with the name foo.bar
- Resource with the display name Foo Bar
- Resource with the description This is the foo script
- Resource with the exact type foo
- Column foo_bar in the schema of a resource
- Nested column foo_bar in the schema of a resource
- Project prod-foo-bar
- Resource with an overview containing the word foo
### Qualified predicates
You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
- An equal sign (=) restricts the search to an exact match.
- A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.
Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
- name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
- description:foo selects resources with the foo token in the description, like bar and foo.
- location=foo matches resources in a specified location with foo as the location name.
The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.
Search syntax supports the following qualifiers:
- "name:x" - Matches x as a substring of the resource ID.
- "displayname:x" - Match x as a substring of the resource display name.
- "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
- "description:x" - Matches x as a token in the resource description.
- "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
- "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
- "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
- "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
- "type=TYPE" - Matches resources of a specific entry type or its type alias.
- "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. The parent path is a fully_qualified_name of the parent resource.
- "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
- "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
- "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
- "createtime" -
Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01.
- "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.
- "fully_qualified_name:x" - Matches x as a substring of fully_qualified_name.
- "fully_qualified_name=x" - Matches x as fully_qualified_name.
### Logical operators
A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
Logical AND and logical OR are supported. For example, foo OR bar.
You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators aren't case-sensitive. For example, both or and OR are acceptable.
### Request
1. Always try to rewrite the prompt using search syntax.
### Response
1. If there are multiple search results found
1. Present the list of search results
2. Format the output in nested ordered list, for example:
Given
```
{
results: [
{
name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
entrySource: {
displayName: "Users"
description: "Table contains list of users."
location: "aws-us-east-1"
system: "BigQuery"
}
},
{
name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
entrySource: {
displayName: "Top customers",
description: "Table contains list of best customers."
location: "us-central1"
system: "BigQuery"
}
},
]
}
```
Return output formatted as markdown nested list:
```
* Users:
- projectId: test_project
- location: aws-us-east-1
- description: Table contains list of users.
* Top customers:
- projectId: another_project
- location: us-central1
- description: Table contains list of best customers.
```
3. Ask the user to select one of the presented search results
2. If there is only one search result found
1. Present the search result immediately.
3. If there are no search results found
1. Explain that no search result was found
2. Suggest providing a more specific search query.
## Tool: dataplex_lookup_entry
### Request
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the `aspect_types` parameter.
2. If you do not know the name of the entry, use the `dataplex_search_entries` tool
### Response
1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex". |
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id"). |
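As a sketch of how this source might be wired to a tool, assuming a `dataplex-search-entries` tool kind (check the tools reference for the exact kind and fields):
```yaml
sources:
  my-dataplex-source:
    kind: "dataplex"
    project: "my-project-id"
tools:
  search_catalog:
    kind: dataplex-search-entries   # assumed kind for this sketch
    source: my-dataplex-source
    description: Search the Dataplex catalog for data assets.
```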

View File

@@ -21,11 +21,6 @@ Dgraph Cloud. If you're new to Dgraph, the fastest way to get started is to
[dgraph-docs]: https://dgraph.io/docs
[dgraph-login]: https://cloud.dgraph.io/login
## Available Tools
- [`dgraph-dql`](../tools/dgraph/dgraph-dql.md)
Run DQL (Dgraph Query Language) queries.
## Requirements
### Database User

View File

@@ -1,77 +0,0 @@
---
title: "Firestore"
type: docs
weight: 1
description: >
Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development. It's a fully managed, serverless database that supports mobile, web, and server development.
---
# Firestore Source
[Firestore][firestore-docs] is a NoSQL document database built for automatic
scaling, high performance, and ease of application development. While the
Firestore interface has many of the same features as traditional databases,
as a NoSQL database it differs from them in the way it describes relationships
between data objects.
If you are new to Firestore, you can [create a database and learn the
basics][firestore-quickstart].
[firestore-docs]: https://cloud.google.com/firestore/docs
[firestore-quickstart]: https://cloud.google.com/firestore/docs/quickstart-servers
## Requirements
### IAM Permissions
Firestore uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Firestore resources. Toolbox will use your [Application
Default Credentials (ADC)][adc] to authorize and authenticate when interacting
with [Firestore][firestore-docs].
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for accessing
Firestore. Common roles include:
- `roles/datastore.user` - Read and write access to Firestore
- `roles/datastore.viewer` - Read-only access to Firestore
- `roles/firebaserules.admin` - Full management of Firebase Security Rules for
Firestore. This role is required for operations that involve creating,
updating, or managing Firestore security rules (see [Firebase Security Rules
roles][firebaserules-roles])
See [Firestore access control][firestore-iam] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/firestore/docs/security/iam
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[firestore-iam]: https://cloud.google.com/firestore/docs/security/iam
[firebaserules-roles]:
https://cloud.google.com/iam/docs/roles-permissions/firebaserules
### Database Selection
Firestore allows you to create multiple databases within a single project. Each
database is isolated from the others and has its own set of documents and
collections. If you don't specify a database in your configuration, the default
database named `(default)` will be used.
## Example
```yaml
sources:
my-firestore-source:
kind: "firestore"
project: "my-project-id"
# database: "my-database" # Optional, defaults to "(default)"
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore". |
| project | string | true | ID of the GCP project that contains the Firestore database (e.g. "my-project-id"). |
| database | string | false | Name of the Firestore database to connect to. Defaults to "(default)" if not specified. |

View File

@@ -13,11 +13,6 @@ The HTTP Source allows Toolbox to retrieve data from arbitrary HTTP
endpoints. This enables Generative AI applications to access data from web APIs
and other HTTP-accessible resources.
## Available Tools
- [`http`](../tools/http/http.md)
Make HTTP requests to REST APIs or other web services.
## Example
```yaml

View File

@@ -1,66 +0,0 @@
---
title: "Looker"
type: docs
weight: 1
description: >
Looker is a business intelligence tool that also provides a semantic layer.
---
## About
[Looker][looker-docs] is a web based business intelligence and data management
tool that provides a semantic layer to facilitate querying. It can be deployed
in the cloud, on GCP, or on premises.
[looker-docs]: https://cloud.google.com/looker/docs
## Requirements
### Database User
This source only uses API authentication. You will need to
[create an API user][looker-user] to log in to Looker.
[looker-user]:
https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk
## Example
```yaml
sources:
my-looker-source:
kind: looker
base_url: http://looker.example.com
client_id: ${LOOKER_CLIENT_ID}
client_secret: ${LOOKER_CLIENT_SECRET}
verify_ssl: true
timeout: 600s
```
The Looker base URL will look like "https://looker.example.com"; don't include
a trailing "/". In some cases, especially if your Looker is deployed
on-premises, you may need to add the API port number, like
"https://looker.example.com:19999".
`verify_ssl` should almost always be "true" (all lower case) unless you are using
a self-signed SSL certificate for the Looker server. Anything other than "true"
will be interpreted as false.
The client ID and client secret are seemingly random character sequences
assigned by the Looker server.
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
| ------------- | :------: | :----------: | ----------------------------------------------------------------------------------------- |
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server (no trailing "/"). |
| client_id | string | true | The client id assigned by Looker. |
| client_secret | string | true | The client secret assigned by Looker. |
| verify_ssl | string | true | Whether to check the SSL certificate of the server. |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |

View File

@@ -1,34 +0,0 @@
---
title: "MongoDB"
type: docs
weight: 1
description: >
MongoDB is a NoSQL data platform that can not only serve general-purpose data requirements but also perform vector search, where both the operational data and the embeddings used for search can reside in the same document.
---
## About
[MongoDB][mongodb-docs] is a popular NoSQL database that stores data in
flexible, JSON-like documents, making it easy to develop and scale applications.
[mongodb-docs]: https://www.mongodb.com/docs/atlas/getting-started/
## Example
```yaml
sources:
my-mongodb:
kind: mongodb
uri: "mongodb+srv://username:password@host.mongodb.net"
database: sample_mflix
```
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------|
| kind | string | true | Must be "mongodb". |
| uri | string | true | Connection string used to connect to MongoDB. |
| database | string | true | Name of the mongodb database to connect to (e.g. "sample_mflix"). |

View File

@@ -15,14 +15,6 @@ amount of data through a structured format.
[mssql-docs]: https://www.microsoft.com/en-us/sql-server
## Available Tools
- [`mssql-sql`](../tools/mssql/mssql-sql.md)
Execute pre-defined SQL Server queries with placeholder parameters.
- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)
Run parameterized SQL Server queries in SQL Server.
## Requirements
### Database User
@@ -43,7 +35,6 @@ sources:
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# encrypt: strict
```
{{< notice tip >}}
@@ -53,12 +44,11 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |
| encrypt | string | false | Encryption level for data transmitted between the client and server (e.g., "strict"). If not specified, defaults to the [github.com/microsoft/go-mssqldb](https://github.com/microsoft/go-mssqldb?tab=readme-ov-file#common-parameters) package's default encrypt value. |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |

View File

@@ -14,14 +14,6 @@ reliability, performance, and ease of use.
[mysql-docs]: https://www.mysql.com/
## Available Tools
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
Execute pre-defined prepared SQL queries in MySQL.
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in MySQL.
## Requirements
### Database User

View File

@@ -7,19 +7,12 @@ description: >
---
## About
[Neo4j][neo4j-docs] is a powerful, open source graph database system with over
15 years of active development that has earned it a strong reputation for
reliability, feature robustness, and performance.
[neo4j-docs]: https://neo4j.com/docs
## Available Tools
- [`neo4j-cypher`](../tools/neo4j/neo4j-cypher.md)
Run Cypher queries against your Neo4j graph database.
## Requirements
### Database User

View File

@@ -15,19 +15,6 @@ reputation for reliability, feature robustness, and performance.
[pg-docs]: https://www.postgresql.org/
## Available Tools
- [`postgres-sql`](../tools/postgres/postgres-sql.md)
Execute SQL queries as prepared statements in PostgreSQL.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
### Pre-built Configurations
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
Connect your IDE to PostgreSQL using Toolbox.
## Requirements
### Database User

View File

@@ -18,11 +18,6 @@ geospatial indexes with radius queries.
If you are new to Redis, you can find installation and getting started guides on
the [official Redis website](https://redis.io/docs/getting-started/).
## Available Tools
- [`redis`](../tools/redis/redis.md)
Run Redis commands and interact with key-value pairs.
## Requirements
### Redis
@@ -38,7 +33,7 @@ sources:
my-redis-instance:
kind: redis
address:
- 127.0.0.1:6379
- 127.0.0.1
username: ${MY_USER_NAME}
password: ${MY_AUTH_STRING} # Omit this field if you don't have a password.
# database: 0
@@ -63,7 +58,7 @@ sources:
my-redis-cluster-instance:
kind: memorystore-redis
address:
- 127.0.0.1:6379
- 127.0.0.1
password: ${MY_AUTH_STRING}
# useGCPIAM: false
# clusterEnabled: false
@@ -79,8 +74,7 @@ using IAM authentication:
sources:
my-redis-cluster-instance:
kind: memorystore-redis
address:
- 127.0.0.1:6379
address: 127.0.0.1
useGCPIAM: true
clusterEnabled: true
```

View File

@@ -23,19 +23,6 @@ the Google Cloud console][spanner-quickstart].
[spanner-quickstart]:
https://cloud.google.com/spanner/docs/create-query-database-console
## Available Tools
- [`spanner-sql`](../tools/spanner/spanner-sql.md)
Execute SQL on Google Cloud Spanner.
- [`spanner-execute-sql`](../tools/spanner/spanner-execute-sql.md)
Run structured and parameterized queries on Spanner.
### Pre-built Configurations
- [Spanner using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/spanner_mcp/)
Connect your IDE to Spanner using Toolbox.
## Requirements
### IAM Permissions

View File

@@ -22,11 +22,6 @@ SQLite has the following notable characteristics:
- Zero-configuration - no setup or administration needed
- Transactional with ACID properties
## Available Tools
- [`sqlite-sql`](../tools/sqlite/sqlite-sql.md)
Run SQL queries against a local SQLite database.
## Requirements
### Database File

View File

@@ -1,81 +0,0 @@
---
title: "TiDB"
type: docs
weight: 1
description: >
TiDB is a distributed SQL database that combines the best of traditional RDBMS and NoSQL databases.
---
## About
[TiDB][tidb-docs] is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL-compatible and features horizontal scalability, strong consistency, and high availability.
[tidb-docs]: https://docs.pingcap.com/tidb/stable
## Requirements
### Database User
This source uses standard MySQL protocol authentication. You will need to [create a TiDB user][tidb-users] to log in to the database.
For TiDB Cloud users, you can create database users through the TiDB Cloud console.
[tidb-users]: https://docs.pingcap.com/tidb/stable/user-account-management
## SSL Configuration
- TiDB Cloud
For TiDB Cloud instances, SSL is automatically enabled when the hostname matches the TiDB Cloud pattern (`gateway*.*.*.tidbcloud.com`). You don't need to explicitly set `ssl: true` for TiDB Cloud connections.
- Self-Hosted TiDB
For self-hosted TiDB instances, you can optionally enable SSL by setting `ssl: true` in your configuration.
## Example
- TiDB Cloud
```yaml
sources:
my-tidb-cloud-source:
kind: tidb
host: gateway01.us-west-2.prod.aws.tidbcloud.com
port: 4000
database: my_db
user: ${TIDB_USERNAME}
password: ${TIDB_PASSWORD}
# SSL is automatically enabled for TiDB Cloud
```
- Self-Hosted TiDB
```yaml
sources:
my-tidb-source:
kind: tidb
host: 127.0.0.1
port: 4000
database: my_db
user: ${TIDB_USERNAME}
password: ${TIDB_PASSWORD}
# ssl: true # Optional: enable SSL for secure connections
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------|
| kind | string | true | Must be "tidb". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "gateway01.*.tidbcloud.com"). |
| port | string | true | Port to connect to (typically "4000" for TiDB). |
| database | string | true | Name of the TiDB database to connect to (e.g. "my_db"). |
| user | string | true | Name of the TiDB user to connect as (e.g. "my-tidb-user"). |
| password | string | true | Password of the TiDB user (e.g. "my-password"). |
| ssl | boolean | false | Whether to use SSL/TLS encryption. Automatically enabled for TiDB Cloud instances. |

View File

@@ -17,12 +17,7 @@ sets, sorted sets with range queries, bitmaps, hyperloglogs, and geospatial
indexes with radius queries.
If you're new to Valkey, you can find installation and getting started guides on
the [official Valkey website](https://valkey.io/topics/quickstart/).
## Available Tools
- [`valkey`](../tools/valkey/valkey.md)
Issue Valkey (Redis-compatible) commands.
the [official Valkey website](https://valkey.io/docs/getting-started/).
## Example
@@ -31,7 +26,7 @@ sources:
my-valkey-instance:
kind: valkey
address:
- 127.0.0.1:6379
- 127.0.0.1
username: ${YOUR_USERNAME}
password: ${YOUR_PASSWORD}
# database: 0
@@ -55,7 +50,7 @@ sources:
my-valkey-instance:
kind: valkey
address:
- 127.0.0.1:6379
- 127.0.0.1
useGCPIAM: true
```

View File

@@ -2,8 +2,8 @@
title: "Tools"
type: docs
weight: 2
description: >
Tools define actions an agent can take -- such as reading and writing to a
description: >
Tools define actions an agent can take -- such as reading and writing to a
source.
---
@@ -81,9 +81,8 @@ the parameter.
|-------------|:---------------:|:------------:|-----------------------------------------------------------------------------|
| name | string | true | Name of the parameter. |
| type | string | true | Must be one of "string", "integer", "float", "boolean", "array" |
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
| description | string | true | Natural language description of the parameter to describe it to the agent. |
| default | parameter type | false | Default value of the parameter. If provided, `required` will be `false`. |
| required | bool | false | Indicate if the parameter is required. Default to `true`. |
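To make the interplay concrete, here is a small sketch (the parameter names are illustrative): providing a `default` makes a parameter optional, and `required: false` marks it optional explicitly.
```yaml
parameters:
  - name: limit                # optional because a default is provided
    type: integer
    description: Maximum number of rows to return.
    default: 10
  - name: country              # optional; omitted at runtime means a nil value
    type: string
    description: Country to filter on.
    required: false
```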
### Array Parameters
@@ -108,59 +107,22 @@ in the list using the items field:
|-------------|:----------------:|:------------:|-----------------------------------------------------------------------------|
| name | string | true | Name of the parameter. |
| type | string | true | Must be "array" |
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
| description | string | true | Natural language description of the parameter to describe it to the agent. |
| default | parameter type | false | Default value of the parameter. If provided, `required` will be `false`. |
| required | bool | false | Indicate if the parameter is required. Default to `true`. |
| items | parameter object | true | Specify a Parameter object for the type of the values in the array. |
{{< notice note >}}
Items in array should not have a `default` or `required` value. If provided, it
will be ignored.
Items in array should not have a default value. If provided, it will be ignored.
{{< /notice >}}
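A sketch of an array parameter with typed items (names are illustrative):
```yaml
parameters:
  - name: user_ids             # illustrative array parameter
    type: array
    description: List of user IDs to look up.
    items:
      name: user_id
      type: string
      description: A single user ID.
```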
### Map Parameters
The map type is a collection of key-value pairs. It can be configured in two
ways:
- Generic Map: By default, it accepts values of any primitive type (string,
integer, float, boolean), allowing for mixed data.
- Typed Map: By setting the valueType field, you can enforce that all values
within the map must be of the same specified type.
#### Generic Map (Mixed Value Types)
This is the default behavior when valueType is omitted. It's useful for passing
a flexible group of settings.
```yaml
parameters:
- name: execution_context
type: map
description: A flexible set of key-value pairs for the execution environment.
```
#### Typed Map
Specify valueType to ensure all values in the map are of the same type. An error
will be thrown in case of value type mismatch.
```yaml
parameters:
- name: user_scores
type: map
description: A map of user IDs to their scores. All scores must be integers.
valueType: integer # This enforces the value type for all entries.
```
### Authenticated Parameters
Authenticated parameters are automatically populated with user
information decoded from [ID
tokens](../authServices/#specifying-id-tokens-from-clients) that are passed in
tokens](../authsources/#specifying-id-tokens-from-clients) that are passed in
request headers. They do not take input values in request bodies like other
parameters. To use authenticated parameters, you must configure the tool to map
the required [authServices](../authServices/) to specific claims within the
the required [authServices](../authservices) to specific claims within the
user's ID token.
```yaml
@@ -183,7 +145,7 @@ user's ID token.
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------------------------------------------------------------------------------|
| name | string | true | Name of the [authServices](../authServices/) used to verify the OIDC auth token. |
| name | string | true | Name of the [authServices](../authservices) used to verify the OIDC auth token. |
| field | string | true | Claim field decoded from the OIDC token used to auto-populate this parameter. |
### Template Parameters
@@ -244,7 +206,7 @@ tools:
You can require an authorization check for any Tool invocation request by
specifying an `authRequired` field. Specify a list of
[authServices](../authServices/) defined in the previous section.
[authServices](../authservices) defined in the previous section.
```yaml
tools:

View File

@@ -2,9 +2,9 @@
title: "alloydb-ai-nl"
type: docs
weight: 1
description: >
The "alloydb-ai-nl" tool leverages
[AlloyDB AI](https://cloud.google.com/alloydb/ai) next-generation Natural
description: >
The "alloydb-ai-nl" tool leverages
[AlloyDB AI](https://cloud.google.com/alloydb/ai) next-generation Natural
Language support to provide the ability to query the database directly using
natural language.
aliases:
@@ -22,7 +22,7 @@ layer.
This tool is compatible with the following sources:
- [alloydb-postgres](../../sources/alloydb-pg.md)
- [alloydb-postgres](../sources/alloydb-pg.md)
AlloyDB AI Natural Language delivers secure and accurate responses for
application end user natural language questions. Natural language streamlines
@@ -69,18 +69,12 @@ database queries.
You can use the `nlConfigParameters` to list the parameters required for your
`nl_config`. You **must** supply all parameters required for all PSVs in the
context. It's strongly recommended to use features like [Authenticated
Parameters](../#array-parameters) or Bound Parameters to provide secure
Parameters](../tools/#array-parameters) or Bound Parameters to provide secure
access to queries generated using natural language, as these parameters are not
visible to the LLM.
[alloydb-psv]: https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview
{{< notice tip >}} Make sure to enable the `parameterized_views` extension before running this tool. You can do so by running this command in the AlloyDB studio:
```sql
CREATE EXTENSION IF NOT EXISTS parameterized_views;
```
{{< /notice >}}
## Example
```yaml
@@ -94,13 +88,14 @@ tools:
- name: user_email
type: string
description: User ID of the logged in user.
# note: we strongly recommend using features like Authenticated or
# Bound parameters to prevent the LLM from seeing these params and
# note: we strongly recommend using features like Authenticated or
# Bound parameters to prevent the LLM from seeing these params and
# specifying values it shouldn't in the tool input
authServices:
- name: my_google_service
field: email
```
## Reference
| **field** | **type** | **required** | **description** |
@@ -109,4 +104,4 @@ tools:
| source | string | true | Name of the AlloyDB source the natural language query should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| nlConfig | string | true | The name of the `nl_config` in AlloyDB |
| nlConfigParameters | [parameters](../#specifying-parameters) | true | List of PSV parameters defined in the `nl_config` |
| nlConfigParameters | [parameters](_index#specifying-parameters) | true | List of PSV parameters defined in the `nl_config` |

View File

@@ -2,7 +2,7 @@
title: "bigquery-execute-sql"
type: docs
weight: 1
description: >
description: >
A "bigquery-execute-sql" tool executes a SQL statement against BigQuery.
aliases:
- /resources/tools/bigquery-execute-sql
@@ -13,11 +13,10 @@ aliases:
A `bigquery-execute-sql` tool executes a SQL statement against BigQuery.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
`bigquery-execute-sql` takes a required `sql` input parameter and runs the SQL
statement against the configured `source`. It also supports an optional `dry_run`
parameter to validate a query without executing it.
`bigquery-execute-sql` takes one input parameter `sql` and runs the sql
statement against the `source`.
## Example
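A minimal configuration might look like the following sketch (the tool and source names are illustrative):
```yaml
tools:
  execute_sql_tool:
    kind: bigquery-execute-sql
    source: my-bigquery-source
    description: Use this tool to execute a SQL statement against BigQuery.
```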

View File

@@ -2,7 +2,7 @@
title: "bigquery-get-dataset-info"
type: docs
weight: 1
description: >
description: >
A "bigquery-get-dataset-info" tool retrieves metadata for a BigQuery dataset.
aliases:
- /resources/tools/bigquery-get-dataset-info
@@ -13,10 +13,10 @@ aliases:
A `bigquery-get-dataset-info` tool retrieves metadata for a BigQuery dataset.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
`bigquery-get-dataset-info` takes a `dataset` parameter to specify the dataset
on the given source. It also optionally accepts a `project` parameter to
on the given source. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided,
the tool defaults to using the project defined in the source configuration.

View File

@@ -2,7 +2,7 @@
title: "bigquery-get-table-info"
type: docs
weight: 1
description: >
description: >
A "bigquery-get-table-info" tool retrieves metadata for a BigQuery table.
aliases:
- /resources/tools/bigquery-get-table-info
@@ -13,11 +13,11 @@ aliases:
A `bigquery-get-table-info` tool retrieves metadata for a BigQuery table.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
the target table. It also optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
the target table. It also optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example

View File

@@ -2,7 +2,7 @@
title: "bigquery-list-dataset-ids"
type: docs
weight: 1
description: >
description: >
A "bigquery-list-dataset-ids" tool returns all dataset IDs from the source.
aliases:
- /resources/tools/bigquery-list-dataset-ids
@@ -13,10 +13,10 @@ aliases:
A `bigquery-list-dataset-ids` tool returns all dataset IDs from the source.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example
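
A minimal sketch with a hypothetical source name:

```yaml
tools:
  list_dataset_ids:
    kind: bigquery-list-dataset-ids
    source: my-bigquery-source
    description: Use this tool to list all dataset IDs available on the source.
```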

View File

@@ -2,7 +2,7 @@
title: "bigquery-list-table-ids"
type: docs
weight: 1
description: >
description: >
A "bigquery-list-table-ids" tool returns table IDs in a given BigQuery dataset.
aliases:
- /resources/tools/bigquery-list-table-ids
@@ -13,11 +13,11 @@ aliases:
A `bigquery-list-table-ids` tool returns table IDs in a given BigQuery dataset.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example
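
A minimal sketch with a hypothetical source name; `dataset` and `project` are runtime parameters:

```yaml
tools:
  list_table_ids:
    kind: bigquery-list-table-ids
    source: my-bigquery-source
    description: Use this tool to list table IDs in a BigQuery dataset.
```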

View File

@@ -13,7 +13,7 @@ aliases:
A `bigquery-sql` tool executes a pre-defined SQL statement. It's compatible with
the following sources:
- [bigquery](../../sources/bigquery.md)
- [bigquery](../sources/bigquery.md)
### GoogleSQL
@@ -72,7 +72,7 @@ tools:
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](../#template-parameters).
> [templateParameters](_index#template-parameters).
```yaml
tools:
@@ -101,5 +101,5 @@ tools:
| source | string | true | Name of the source the GoogleSQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The GoogleSQL statement to execute. |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](../#template-parameters) | false | List of [templateParameters](../#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
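
For illustration, a sketch of a statement that mixes a basic parameter (`@id`, bound as a query parameter) with a template parameter substituted into the SQL text before execution, assuming the Go-template placeholder style; the table and column names are hypothetical:

```yaml
tools:
  search_rows_by_table:
    kind: bigquery-sql
    source: my-bigquery-source
    statement: |
      SELECT id, name FROM {{.tableName}} WHERE id = @id
    description: Look up a row by id in a caller-specified table.
    parameters:
      - name: id
        type: integer
        description: The id of the row to look up.
    templateParameters:
      - name: tableName
        type: string
        description: Fully qualified table to query; inserted verbatim into the SQL.
```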

View File

@@ -2,8 +2,8 @@
title: "bigtable-sql"
type: docs
weight: 1
description: >
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
description: >
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
Cloud Bigtable instance.
aliases:
- /resources/tools/bigtable-sql
@@ -14,20 +14,16 @@ aliases:
A `bigtable-sql` tool executes a pre-defined SQL statement against a Bigtable
instance. It's compatible with any of the following sources:
- [bigtable](../../sources/bigtable.md)
- [bigtable](../sources/bigtable.md)
### GoogleSQL
Bigtable supports SQL queries. The integration with Toolbox supports the `googlesql`
dialect. The specified SQL statement is executed as a [data manipulation
language (DML)][bigtable-googlesql] statement, and specified parameters will be
inserted according to their name, e.g. `@name`.
language (DML)][bigtable-googlesql] statement, and specified parameters will be inserted according to their name, e.g. `@name`.
{{<notice note>}}
Bigtable's GoogleSQL support for DML statements might be limited to certain
query types. For detailed information on supported DML statements and use
cases, refer to the [Bigtable GoogleSQL use
cases](https://cloud.google.com/bigtable/docs/googlesql-overview#use-cases).
Bigtable's GoogleSQL support for DML statements might be limited to certain query types. For detailed information on supported DML statements and use cases, refer to the [Bigtable GoogleSQL use cases](https://cloud.google.com/bigtable/docs/googlesql-overview#use-cases).
{{</notice>}}
[bigtable-googlesql]: https://cloud.google.com/bigtable/docs/googlesql-overview
@@ -45,13 +41,13 @@ tools:
kind: bigtable-sql
source: my-bigtable-instance
statement: |
SELECT
TO_INT64(cf[ 'id' ]) as id,
CAST(cf[ 'name' ] AS string) as name,
FROM
mytable
WHERE
TO_INT64(cf[ 'id' ]) = @id
SELECT
TO_INT64(cf[ 'id' ]) as id,
CAST(cf[ 'name' ] AS string) as name,
FROM
mytable
WHERE
TO_INT64(cf[ 'id' ]) = @id
OR CAST(cf[ 'name' ] AS string) = @name;
description: |
Use this tool to get information for a specific user.
@@ -77,7 +73,7 @@ tools:
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](#template-parameters).
> [templateParameters](_index#template-parameters).
```yaml
tools:
@@ -106,8 +102,8 @@ tools:
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute on. |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
## Tips

View File

@@ -2,7 +2,7 @@
title: "couchbase-sql"
type: docs
weight: 1
description: >
description: >
A "couchbase-sql" tool executes a pre-defined SQL statement against a Couchbase
database.
aliases:
@@ -14,7 +14,7 @@ aliases:
A `couchbase-sql` tool executes a pre-defined SQL statement against a Couchbase
database. It's compatible with any of the following sources:
- [couchbase](../../sources/couchbase.md)
- [couchbase](../sources/couchbase.md)
The specified SQL statement is executed as a parameterized statement, and specified
parameters will be used according to their name: e.g. `$id`.
@@ -66,7 +66,7 @@ tools:
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](#template-parameters).
> [templateParameters](_index#template-parameters).
```yaml
tools:
@@ -95,6 +95,6 @@ tools:
| source | string | true | Name of the source the SQL query should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the SQL statement. |
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the SQL statement. |
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
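
A sketch of a parameterized statement using the `$name` placeholder style described above; the scope, collection, and field names are hypothetical:

```yaml
tools:
  get_user_by_id:
    kind: couchbase-sql
    source: my-couchbase-source
    statement: |
      SELECT u.name, u.email FROM my_scope.users AS u WHERE u.id = $id
    description: Use this tool to look up a user by id.
    parameters:
      - name: id
        type: string
        description: The id of the user to look up.
```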

View File

@@ -1,7 +0,0 @@
---
title: "Dataplex"
type: docs
weight: 1
description: >
Tools that work with Dataplex Sources.
---

View File

@@ -1,60 +0,0 @@
---
title: "dataplex-lookup-entry"
type: docs
weight: 1
description: >
A "dataplex-lookup-entry" tool returns details of a particular entry in Dataplex Catalog.
aliases:
- /resources/tools/dataplex-lookup-entry
---
## About
A `dataplex-lookup-entry` tool returns details of a particular entry in Dataplex Catalog.
It's compatible with the following sources:
- [dataplex](../sources/dataplex.md)
`dataplex-lookup-entry` takes a required `name` parameter, which contains the project and location to which the request should be attributed, in the form `projects/{project}/locations/{location}`, and a required `entry` parameter, which is the resource name of the entry in the form `projects/{project}/locations/{location}/entryGroups/{entryGroup}/entries/{entry}`. It also optionally accepts the following parameters:
- `view` - Controls which parts of an entry the service should return. It takes integer values from 1-4, corresponding to the view type: BASIC, FULL, CUSTOM, ALL.
- `aspectTypes` - Limits the aspects returned to the provided aspect types in the format `projects/{project}/locations/{location}/aspectTypes/{aspectType}`. It only works for CUSTOM view.
- `paths` - Limits the aspects returned to those associated with the provided paths within the Entry. It only works for CUSTOM view.
## Requirements
### IAM Permissions
Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Dataplex resources. Toolbox will use your
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
interacting with [Dataplex][dataplex-docs].
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the tasks you
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
## Example
```yaml
tools:
lookup_entry:
kind: dataplex-lookup-entry
source: my-dataplex-source
description: Use this tool to retrieve a specific entry in Dataplex Catalog.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-lookup-entry". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,75 +0,0 @@
---
title: "dataplex-search-entries"
type: docs
weight: 1
description: >
A "dataplex-search-entries" tool allows to search for entries based on the provided query.
aliases:
- /resources/tools/dataplex-search-entries
---
## About
A `dataplex-search-entries` tool returns all entries in Dataplex Catalog (e.g.
tables, views, models) that match the given user query.
It's compatible with the following sources:
- [dataplex](../../sources/dataplex.md)
`dataplex-search-entries` takes a required `query` parameter, based on which
entries are filtered and returned to the user, and a required `name` parameter,
which is constructed from the source's project if the user does not provide it
explicitly and has the format `projects/{project}/locations/global`. It also
optionally accepts the following parameters:
- `pageSize` - Number of results in the search page. Defaults to `5`.
- `pageToken` - Page token received from a previous locations.searchEntries
call.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance
(default), last_modified_timestamp, last_modified_timestamp asc
- `semanticSearch` - Specifies whether the search should understand the meaning
and intent behind the query, rather than just matching keywords. Defaults to
`true`.
- `scope` - The scope under which the search should be operating. Since this
parameter is not exposed to the toolbox user, it defaults to the organization
where the project provided in name is located.
## Requirements
### IAM Permissions
Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Dataplex resources. Toolbox will use your
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
interacting with [Dataplex][dataplex-docs].
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the tasks you
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
[dataplex-docs]: https://cloud.google.com/dataplex
## Example
```yaml
tools:
dataplex-search-entries:
kind: dataplex-search-entries
source: my-dataplex-source
description: Use this tool to get all the entries based on the provided query.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-search-entries". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -2,7 +2,7 @@
title: "dgraph-dql"
type: docs
weight: 1
description: >
description: >
A "dgraph-dql" tool executes a pre-defined DQL statement against a Dgraph
database.
aliases:
@@ -14,7 +14,7 @@ aliases:
A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph
database. It's compatible with any of the following sources:
- [dgraph](../../sources/dgraph.md)
- [dgraph](../sources/dgraph.md)
To run a statement as a query, you need to set the config `isQuery=true`. For
upserts or mutations, set `isQuery=false`. You can also configure timeout for a
@@ -121,4 +121,4 @@ tools:
| statement | string | true | The DQL statement to execute |
| isQuery | boolean | false | Set to true to run the statement as a query; set to false for upserts or mutations |
| timeout | string | false | Timeout to apply to the query |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the dql statement. |
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the dql statement. |
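
A sketch showing `isQuery` and `timeout` alongside a parameterized DQL query; the predicate and field names are hypothetical:

```yaml
tools:
  get_user_by_name:
    kind: dgraph-dql
    source: my-dgraph-source
    isQuery: true
    timeout: 20s
    statement: |
      query all($name: string) {
        users(func: eq(name, $name)) {
          uid
          name
          email
        }
      }
    description: Use this tool to look up a user by name.
    parameters:
      - name: name
        type: string
        description: Name of the user to look up.
```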

View File

@@ -1,7 +0,0 @@
---
title: "Firestore"
type: docs
weight: 1
description: >
Tools that work with Firestore Sources.
---

View File

@@ -1,39 +0,0 @@
---
title: "firestore-delete-documents"
type: docs
weight: 1
description: >
A "firestore-delete-documents" tool deletes multiple documents from Firestore by their paths.
aliases:
- /resources/tools/firestore-delete-documents
---
## About
A `firestore-delete-documents` tool deletes multiple documents from Firestore by
their paths.
It's compatible with the following sources:
- [firestore](../../sources/firestore.md)
`firestore-delete-documents` takes one input parameter `documentPaths` which is
an array of document paths to delete. The tool uses Firestore's BulkWriter for
efficient batch deletion and returns the success status for each document.
## Example
```yaml
tools:
delete_user_documents:
kind: firestore-delete-documents
source: my-firestore-source
description: Use this tool to delete multiple documents from Firestore.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------------:|:------------:|----------------------------------------------------------|
| kind | string | true | Must be "firestore-delete-documents". |
| source | string | true | Name of the Firestore source to delete documents from. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,39 +0,0 @@
---
title: "firestore-get-documents"
type: docs
weight: 1
description: >
A "firestore-get-documents" tool retrieves multiple documents from Firestore by their paths.
aliases:
- /resources/tools/firestore-get-documents
---
## About
A `firestore-get-documents` tool retrieves multiple documents from Firestore by
their paths.
It's compatible with the following sources:
- [firestore](../../sources/firestore.md)
`firestore-get-documents` takes one input parameter `documentPaths` which is an
array of document paths, and returns the documents' data along with metadata
such as existence status, creation time, update time, and read time.
## Example
```yaml
tools:
get_user_documents:
kind: firestore-get-documents
source: my-firestore-source
description: Use this tool to retrieve multiple documents from Firestore.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------------:|:------------:|------------------------------------------------------------|
| kind | string | true | Must be "firestore-get-documents". |
| source | string | true | Name of the Firestore source to retrieve documents from. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,39 +0,0 @@
---
title: "firestore-get-rules"
type: docs
weight: 1
description: >
A "firestore-get-rules" tool retrieves the active Firestore security rules for the current project.
aliases:
- /resources/tools/firestore-get-rules
---
## About
A `firestore-get-rules` tool retrieves the active [Firestore security
rules](https://firebase.google.com/docs/firestore/security/get-started) for the
current project.
It's compatible with the following sources:
- [firestore](../../sources/firestore.md)
`firestore-get-rules` takes no input parameters and returns the security rules
content along with metadata such as the ruleset name and timestamps.
## Example
```yaml
tools:
get_firestore_rules:
kind: firestore-get-rules
source: my-firestore-source
description: Use this tool to retrieve the active Firestore security rules.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:-------------:|:------------:|-------------------------------------------------------|
| kind | string | true | Must be "firestore-get-rules". |
| source | string | true | Name of the Firestore source to retrieve rules from. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,42 +0,0 @@
---
title: "firestore-list-collections"
type: docs
weight: 1
description: >
A "firestore-list-collections" tool lists collections in Firestore, either at the root level or as subcollections of a document.
aliases:
- /resources/tools/firestore-list-collections
---
## About
A `firestore-list-collections` tool lists
[collections](https://firebase.google.com/docs/firestore/data-model#collections)
in Firestore, either at the root level or as
[subcollections](https://firebase.google.com/docs/firestore/data-model#subcollections)
of a specific document.
It's compatible with the following sources:
- [firestore](../../sources/firestore.md)
`firestore-list-collections` takes an optional `parentPath` parameter to specify a document
path. If provided, it lists all subcollections of that document. If not provided, it lists
all root-level collections in the database.
## Example
```yaml
tools:
list_firestore_collections:
kind: firestore-list-collections
source: my-firestore-source
description: Use this tool to list collections in Firestore.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:----------------:|:------------:|--------------------------------------------------------|
| kind | string | true | Must be "firestore-list-collections". |
| source | string | true | Name of the Firestore source to list collections from. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,215 +0,0 @@
---
title: "firestore-query-collection"
type: docs
weight: 1
description: >
A "firestore-query-collection" tool allow to query collections in Firestore.
aliases:
- /resources/tools/firestore-query-collection
---
# About
The `firestore-query-collection` tool allows you to query Firestore collections
with filters, ordering, and limit capabilities.
## Configuration
To use this tool, you need to configure it in your YAML configuration file:
```yaml
sources:
my-firestore:
kind: firestore
project: my-gcp-project
database: "(default)"
tools:
query_collection:
kind: firestore-query-collection
source: my-firestore
description: Query Firestore collections with advanced filtering
```
## Parameters
| **parameters** | **type** | **required** | **default** | **description** |
|------------------|:------------:|:------------:|:-----------:|-----------------------------------------------------------------------|
| `collectionPath` | string | true | - | The path of the Firestore collection to query |
| `filters` | array | false | - | Array of filter objects (as JSON strings) to apply to the query |
| `orderBy` | string | false | - | JSON string specifying field and direction to order results |
| `limit` | integer | false | 100 | Maximum number of documents to return |
| `analyzeQuery` | boolean | false | false | If true, returns query explain metrics including execution statistics |
### Filter Format
Each filter in the `filters` array should be a JSON string with the following
structure:
```json
{
"field": "fieldName",
"op": "operator",
"value": "compareValue"
}
```
Supported operators:
- `<` - Less than
- `<=` - Less than or equal to
- `>` - Greater than
- `>=` - Greater than or equal to
- `==` - Equal to
- `!=` - Not equal to
- `array-contains` - Array contains a specific value
- `array-contains-any` - Array contains any of the specified values
- `in` - Field value is in the specified array
- `not-in` - Field value is not in the specified array
Value types supported:
- String: `"value": "text"`
- Number: `"value": 123` or `"value": 45.67`
- Boolean: `"value": true` or `"value": false`
- Array: `"value": ["item1", "item2"]` (for `in`, `not-in`, `array-contains-any`
operators)
### OrderBy Format
The `orderBy` parameter should be a JSON string with the following structure:
```json
{
"field": "fieldName",
"direction": "ASCENDING"
}
```
Direction values:
- `ASCENDING`
- `DESCENDING`
## Example Usage
### Query with filters
```json
{
"collectionPath": "users",
"filters": [
"{\"field\": \"age\", \"op\": \">\", \"value\": 18}",
"{\"field\": \"status\", \"op\": \"==\", \"value\": \"active\"}"
],
"orderBy": "{\"field\": \"createdAt\", \"direction\": \"DESCENDING\"}",
"limit": 50
}
```
### Query with array contains filter
```json
{
"collectionPath": "products",
"filters": [
"{\"field\": \"categories\", \"op\": \"array-contains\", \"value\": \"electronics\"}",
"{\"field\": \"price\", \"op\": \"<\", \"value\": 1000}"
],
"orderBy": "{\"field\": \"price\", \"direction\": \"ASCENDING\"}",
"limit": 20
}
```
### Query with IN operator
```json
{
"collectionPath": "orders",
"filters": [
"{\"field\": \"status\", \"op\": \"in\", \"value\": [\"pending\", \"processing\"]}"
],
"limit": 100
}
```
### Query with explain metrics
```json
{
"collectionPath": "users",
"filters": [
"{\"field\": \"age\", \"op\": \">=\", \"value\": 21}",
"{\"field\": \"active\", \"op\": \"==\", \"value\": true}"
],
"orderBy": "{\"field\": \"lastLogin\", \"direction\": \"DESCENDING\"}",
"limit": 25,
"analyzeQuery": true
}
```
## Response Format
### Standard Response (analyzeQuery = false)
The tool returns an array of documents, where each document includes:
```json
{
"id": "documentId",
"path": "collection/documentId",
"data": {
// Document fields
},
"createTime": "2025-01-07T12:00:00Z",
"updateTime": "2025-01-07T12:00:00Z",
"readTime": "2025-01-07T12:00:00Z"
}
```
### Response with Query Analysis (analyzeQuery = true)
When `analyzeQuery` is set to true, the tool returns a single object containing
documents and explain metrics:
```json
{
"documents": [
// Array of document objects as shown above
],
"explainMetrics": {
"planSummary": {
"indexesUsed": [
{
"query_scope": "Collection",
"properties": "(field ASC, __name__ ASC)"
}
]
},
"executionStats": {
"resultsReturned": 50,
"readOperations": 50,
"executionDuration": "120ms",
"debugStats": {
"indexes_entries_scanned": "1000",
"documents_scanned": "50",
"billing_details": {
"documents_billable": "50",
"index_entries_billable": "1000",
"min_query_cost": "0"
}
}
}
}
}
```
## Error Handling
The tool will return errors for:
- Invalid collection path
- Malformed filter JSON
- Unsupported operators
- Query execution failures
- Invalid orderBy format

View File

@@ -1,124 +0,0 @@
---
title: "firestore-validate-rules"
type: docs
weight: 1
description: >
A "firestore-validate-rules" tool validates Firestore security rules syntax and semantic correctness without deploying them. It provides detailed error reporting with source positions and code snippets.
aliases:
- /resources/tools/firestore-validate-rules
---
## Overview
The `firestore-validate-rules` tool validates Firestore security rules syntax
and semantic correctness without deploying them. It provides detailed error
reporting with source positions and code snippets.
## Configuration
```yaml
tools:
firestore-validate-rules:
kind: firestore-validate-rules
source: <firestore-source-name>
description: "Checks the provided Firestore Rules source for syntax and validation errors"
```
## Authentication
This tool requires authentication if the source requires authentication.
## Parameters
| **parameters** | **type** | **required** | **description** |
|-----------------|:------------:|:------------:|----------------------------------------------|
| source | string | true | The Firestore Rules source code to validate |
## Response
The tool returns a `ValidationResult` object containing:
```json
{
"valid": "boolean",
"issueCount": "number",
"formattedIssues": "string",
"rawIssues": [
{
"sourcePosition": {
"fileName": "string",
"line": "number",
"column": "number",
"currentOffset": "number",
"endOffset": "number"
},
"description": "string",
"severity": "string"
}
]
}
```
## Example Usage
### Validate simple rules
```json
{
"source": "rules_version = '2';\nservice cloud.firestore {\n match /databases/{database}/documents {\n match /{document=**} {\n allow read, write: if true;\n }\n }\n}"
}
```
### Example response for valid rules
```json
{
"valid": true,
"issueCount": 0,
"formattedIssues": "✓ No errors detected. Rules are valid."
}
```
### Example response with errors
```json
{
"valid": false,
"issueCount": 1,
"formattedIssues": "Found 1 issue(s) in rules source:\n\nERROR: Unexpected token ';' [Ln 4, Col 32]\n```\n allow read, write: if true;;\n ^\n```",
"rawIssues": [
{
"sourcePosition": {
"line": 4,
"column": 32,
"currentOffset": 105,
"endOffset": 106
},
"description": "Unexpected token ';'",
"severity": "ERROR"
}
]
}
```
## Error Handling
The tool will return errors for:
- Missing or empty `source` parameter
- API errors when calling the Firebase Rules service
- Network connectivity issues
## Use Cases
1. **Pre-deployment validation**: Validate rules before deploying to production
2. **CI/CD integration**: Integrate rules validation into your build pipeline
3. **Development workflow**: Quickly check rules syntax while developing
4. **Error debugging**: Get detailed error locations with code snippets
## Related Tools
- [firestore-get-rules]({{< ref "firestore-get-rules" >}}): Retrieve current
active rules
- [firestore-query-collection]({{< ref "firestore-query-collection" >}}): Test
rules by querying collections

View File

@@ -2,7 +2,7 @@
title: "http"
type: docs
weight: 1
description: >
description: >
A "http" tool sends out an HTTP request to an HTTP endpoint.
aliases:
- /resources/tools/http
@@ -50,7 +50,7 @@ tools:
method: GET
path: /search
description: Tool to search information from the example API
my-dynamic-path-tool:
kind: http
source: my-http-source
@@ -256,8 +256,8 @@ my-http-tool:
| method | string | true | The HTTP method to use (e.g., GET, POST, PUT, DELETE). |
| headers | map[string]string | false | A map of headers to include in the HTTP request (overrides source headers). |
| requestBody | string | false | The request body payload. Use [go template][go-template-doc] with the parameter name as the placeholder (e.g., `{{.id}}` will be replaced with the value of the parameter that has name `id` in the `bodyParams` section). |
| queryParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the query string. |
| bodyParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the request body payload. |
| headerParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted as the request headers. |
| queryParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the query string. |
| bodyParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the request body payload. |
| headerParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted as the request headers. |
[go-template-doc]: <https://pkg.go.dev/text/template#pkg-overview>
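
A sketch of a POST tool combining `requestBody` templating with `bodyParams`; the endpoint and field names are hypothetical:

```yaml
tools:
  create_item:
    kind: http
    source: my-http-source
    method: POST
    path: /items
    description: Use this tool to create an item in the example API.
    headers:
      Content-Type: application/json
    requestBody: |
      {
        "name": "{{.name}}",
        "quantity": {{.quantity}}
      }
    bodyParams:
      - name: name
        type: string
        description: Name of the item to create.
      - name: quantity
        type: integer
        description: Number of items to create.
```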

View File

@@ -1,7 +0,0 @@
---
title: "Looker"
type: docs
weight: 1
description: >
Tools that work with Looker Sources.
---

View File

@@ -1,44 +0,0 @@
---
title: "looker-get-dimensions"
type: docs
weight: 1
description: >
A "looker-get-dimensions" tool returns all the dimensions from a given explore
in a given model in the source.
aliases:
- /resources/tools/looker-get-dimensions
---
## About
A `looker-get-dimensions` tool returns all the dimensions from a given explore
in a given model in the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-dimensions` accepts two parameters, the `model` and the `explore`.
## Example
```yaml
tools:
get_dimensions:
kind: looker-get-dimensions
source: looker-source
description: |
The get_dimensions tool retrieves the list of dimensions defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-dimensions". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "looker-get-explores"
type: docs
weight: 1
description: >
A "looker-get-explores" tool returns all explores
for the given model from the source.
aliases:
- /resources/tools/looker-get-explores
---
## About
A `looker-get-explores` tool returns all explores
for a given model from the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-explores` accepts one parameter, the
`model` id.
## Example
```yaml
tools:
get_explores:
kind: looker-get-explores
source: looker-source
description: |
The get_explores tool retrieves the list of explores defined in a LookML model
in the Looker system.
It takes one parameter, the model_name looked up from get_models.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-explores". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "looker-get-filters"
type: docs
weight: 1
description: >
A "looker-get-filters" tool returns all the filters from a given explore
in a given model in the source.
aliases:
- /resources/tools/looker-get-filters
---
## About
A `looker-get-filters` tool returns all the filters from a given explore
in a given model in the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-filters` accepts two parameters, the `model` and the `explore`.
## Example
```yaml
tools:
get_dimensions:
kind: looker-get-filters
source: looker-source
description: |
The get_filters tool retrieves the list of filters defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-filters". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,60 +0,0 @@
---
title: "looker-get-looks"
type: docs
weight: 1
description: >
"looker-get-looks" searches for saved Looks in a Looker
source.
aliases:
- /resources/tools/looker-get-looks
---
## About
The `looker-get-looks` tool searches for a saved Look by
name or description.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-looks` takes four parameters, the `title`, `desc`, `limit`
and `offset`.
Title and description use SQL style wildcards and are case insensitive.
Limit and offset are used to page through a larger set of matches and
default to 100 and 0.
## Example
```yaml
tools:
get_looks:
kind: looker-get-looks
source: looker-source
description: |
get_looks Tool
This tool is used to search for saved looks in a Looker instance.
String search params use case-insensitive matching. String search
params can contain % and '_' as SQL LIKE pattern match wildcard
expressions. example="dan%" will match "danger" and "Danzig" but
not "David" example="D_m%" will match "Damage" and "dump".
Most search params can accept "IS NULL" and "NOT NULL" as special
expressions to match or exclude (respectively) rows where the
column is null.
The limit and offset are used to paginate the results.
The result of the get_looks tool is a list of json objects.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-looks" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "looker-get-measures"
type: docs
weight: 1
description: >
A "looker-get-measures" tool returns all the measures from a given explore
in a given model in the source.
aliases:
- /resources/tools/looker-get-measures
---
## About
A `looker-get-measures` tool returns all the measures from a given explore
in a given model in the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-measures` accepts two parameters, the `model` and the `explore`.
## Example
```yaml
tools:
get_measures:
kind: looker-get-measures
source: looker-source
description: |
The get_measures tool retrieves the list of measures defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-measures". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,40 +0,0 @@
---
title: "looker-get-models"
type: docs
weight: 1
description: >
A "looker-get-models" tool returns all the models in the source.
aliases:
- /resources/tools/looker-get-models
---
## About
A `looker-get-models` tool returns all the models in the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-models` accepts no parameters.
## Example
```yaml
tools:
get_models:
kind: looker-get-models
source: looker-source
description: |
The get_models tool retrieves the list of LookML models in the Looker system.
It takes no parameters.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-models". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,44 +0,0 @@
---
title: "looker-get-parameters"
type: docs
weight: 1
description: >
A "looker-get-parameters" tool returns all the parameters from a given explore
in a given model in the source.
aliases:
- /resources/tools/looker-get-parameters
---
## About
A `looker-get-parameters` tool returns all the parameters from a given explore
in a given model in the source.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-get-parameters` accepts two parameters, the `model` and the `explore`.
## Example
```yaml
tools:
get_parameters:
kind: looker-get-parameters
source: looker-source
description: |
The get_parameters tool retrieves the list of parameters defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-get-parameters". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,65 +0,0 @@
---
title: "looker-make-look"
type: docs
weight: 1
description: >
"looker-make-look" generates a Looker look in the users personal folder in
Looker
aliases:
- /resources/tools/looker-make-look
---
## About
The `looker-make-look` tool creates a saved Look in the user's
personal Looker folder.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-make-look` takes the following parameters:
1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
9. an optional `vis_config`
10. the `title`
11. an optional `description`
## Example
```yaml
tools:
make_look:
kind: looker-make-look
source: looker-source
description: |
make_look Tool
This tool creates a new look in Looker, using the query
parameters and the vis_config specified.
Most of the parameters are the same as the query_url
tool. In addition, there is a title and a description
that must be provided.
The newly created look will be created in the user's
personal folder in looker. The look name must be unique.
The result is a json document with a link to the newly
created look.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-make-look" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,80 +0,0 @@
---
title: "looker-query-sql"
type: docs
weight: 1
description: >
"looker-query-sql" generates a sql query using the Looker
semantic model.
aliases:
- /resources/tools/looker-query-sql
---
## About
The `looker-query-sql` tool generates a SQL query using the Looker
semantic model.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-query-sql` takes eight parameters:
1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
## Example
```yaml
tools:
query_sql:
kind: looker-query-sql
source: looker-source
description: |
Query SQL Tool
This tool is used to generate a sql query against the LookML model. The
model, explore, and fields list must be specified. Pivots,
filters and sorts are optional.
The model can be found from the get_models tool. The explore
can be found from the get_explores tool passing in the model.
The fields can be found from the get_dimensions, get_measures,
get_filters, and get_parameters tools, passing in the model
and the explore.
Provide a model_id and explore_name, then a list
of fields. Optionally a list of pivots can be provided.
The pivots must also be included in the fields list.
Filters are provided as a map of {"field.id": "condition",
"field.id2": "condition2", ...}. Do not put the field.id in
quotes. Filter expressions can be found at
https://cloud.google.com/looker/docs/filter-expressions.
Sorts can be specified like [ "field.id desc 0" ].
An optional row limit can be added. If not provided the limit
will default to 500. "-1" can be specified for unlimited.
An optional query timezone can be added. The query_timezone to
will default to that of the workstation where this MCP server
is running, or Etc/UTC if that can't be determined. Not all
models support custom timezones.
The result of the query tool is the sql string.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-query-sql" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -1,363 +0,0 @@
---
title: "looker-query-url"
type: docs
weight: 1
description: >
"looker-query-url" generates a url link to a Looker explore.
aliases:
- /resources/tools/looker-query-url
---
## About
The `looker-query-url` tool generates a URL link to an explore in
Looker so the query can be investigated further.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-query-url` takes nine parameters:
1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
9. an optional `vis_config`
## Example
```yaml
tools:
query_url:
kind: looker-query-url
source: looker-source
description: |
Query URL Tool
This tool is used to generate the URL of a query in Looker.
The user can then explore the query further inside Looker.
The tool also returns the query_id and slug. The parameters
are the same as the query tool with an additional vis_config
parameter.
The vis_config is optional. If provided, it will be used to
control the default visualization for the query. These are
some sample vis_config settings.
A bar chart -
{{
"defaults_version": 1,
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_bar",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A column chart with an option advanced_vis_config -
{{
"advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }",
"colors": [
"grey"
],
"defaults_version": 1,
"hidden_fields": [],
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"note_display": "below",
"note_state": "collapsed",
"note_text": "Unsold inventory only",
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"series_colors": {},
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": true,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_column",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axes": [],
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A line chart -
{{
"defaults_version": 1,
"hidden_pivots": {},
"hidden_series": [],
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_points": true,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "",
"trellis": "",
"type": "looker_line",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5
}}
An area chart -
{{
"defaults_version": 1,
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_points": true,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_area",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A scatter plot -
{{
"cluster_points": false,
"custom_quadrant_point_x": 5,
"custom_quadrant_point_y": 5,
"custom_value_label_column": "",
"custom_x_column": "",
"custom_y_column": "",
"defaults_version": 1,
"hidden_fields": [],
"hidden_pivots": {},
"hidden_points_if_no": [],
"hidden_series": [],
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"limit_displayed_rows_values": {
"first_last": "first",
"num_rows": 0,
"show_hide": "hide"
},
"plot_size_by_field": false,
"point_style": "circle",
"quadrant_properties": {
"0": {
"color": "",
"label": "Quadrant 1"
},
"1": {
"color": "",
"label": "Quadrant 2"
},
"2": {
"color": "",
"label": "Quadrant 3"
},
"3": {
"color": "",
"label": "Quadrant 4"
}
},
"quadrants_enabled": false,
"series_labels": {},
"series_types": {},
"show_null_points": false,
"show_value_labels": false,
"show_view_names": true,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"size_by_field": "roi",
"stacking": "normal",
"swap_axes": true,
"trellis": "",
"type": "looker_scatter",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axes": [
{
"label": "",
"orientation": "bottom",
"series": [
{
"axisId": "Channel_0 - average_of_roi_first",
"id": "Channel_0 - average_of_roi_first",
"name": "Channel_0"
},
{
"axisId": "Channel_1 - average_of_roi_first",
"id": "Channel_1 - average_of_roi_first",
"name": "Channel_1"
},
{
"axisId": "Channel_2 - average_of_roi_first",
"id": "Channel_2 - average_of_roi_first",
"name": "Channel_2"
},
{
"axisId": "Channel_3 - average_of_roi_first",
"id": "Channel_3 - average_of_roi_first",
"name": "Channel_3"
},
{
"axisId": "Channel_4 - average_of_roi_first",
"id": "Channel_4 - average_of_roi_first",
"name": "Channel_4"
}
],
"showLabels": true,
"showValues": true,
"tickDensity": "custom",
"tickDensityCustom": 100,
"type": "linear",
"unpinAxis": false
}
],
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A single value visualization -
{{
"defaults_version": 1,
"show_view_names": false,
"type": "looker_single_record"
}}
A Pie chart -
{{
"defaults_version": 1,
"label_density": 25,
"label_type": "labPer",
"legend_position": "center",
"limit_displayed_rows": false,
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "",
"totals_color": "#808080",
"trellis": "",
"type": "looker_pie",
"value_labels": "legend",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5
}}
The result is a JSON object with the id, slug, the url, and
the long_url.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker-query-url" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

Some files were not shown because too many files have changed in this diff.