mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-02-18 11:02:26 -05:00
Compare commits
3 Commits
v0.22.0
...
lsc-177141
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6d1e38281a | ||
|
|
89c85729d1 | ||
|
|
93f638650d |
@@ -273,7 +273,7 @@ steps:
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud Healthcare API" \
|
||||
cloudhealthcare \
|
||||
cloudhealthcare || echo "Integration tests failed."
|
||||
cloudhealthcare
|
||||
|
||||
- id: "postgres"
|
||||
name: golang:1
|
||||
@@ -385,7 +385,7 @@ steps:
|
||||
.ci/test_with_coverage.sh \
|
||||
"Cloud SQL MySQL" \
|
||||
cloudsqlmysql \
|
||||
mysql
|
||||
mysql || echo "Integration tests failed." # ignore test failures
|
||||
|
||||
- id: "mysql"
|
||||
name: golang:1
|
||||
@@ -407,7 +407,7 @@ steps:
|
||||
.ci/test_with_coverage.sh \
|
||||
"MySQL" \
|
||||
mysql \
|
||||
mysql
|
||||
mysql || echo "Integration tests failed." # ignore test failures
|
||||
|
||||
- id: "mssql"
|
||||
name: golang:1
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
steps:
|
||||
- name: 'node:22'
|
||||
- name: 'node:20'
|
||||
id: 'js-quickstart-test'
|
||||
entrypoint: 'bash'
|
||||
args:
|
||||
@@ -44,4 +44,4 @@ availableSecrets:
|
||||
timeout: 1000s
|
||||
|
||||
options:
|
||||
logging: CLOUD_LOGGING_ONLY
|
||||
logging: CLOUD_LOGGING_ONLY
|
||||
@@ -1,18 +0,0 @@
|
||||
# Copyright 2025 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
ignore_patterns:
|
||||
- "package-lock.json"
|
||||
- "go.sum"
|
||||
- "requirements.txt"
|
||||
4
.github/blunderbuss.yml
vendored
4
.github/blunderbuss.yml
vendored
@@ -15,6 +15,7 @@
|
||||
assign_issues:
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
- averikitsch
|
||||
- anubhav756
|
||||
- twishabansal
|
||||
- dishaprakash
|
||||
@@ -61,7 +62,8 @@ assign_issues_by:
|
||||
- 'googleapis/toolbox-spanner'
|
||||
assign_prs:
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
- duwenxin99
|
||||
- averikitsch
|
||||
assign_prs_by:
|
||||
- labels:
|
||||
- 'product: alloydb'
|
||||
|
||||
27
.github/trusted-contribution.yml
vendored
27
.github/trusted-contribution.yml
vendored
@@ -1,27 +0,0 @@
|
||||
# Copyright 2025 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Trigger presubmit tests for trusted contributors
|
||||
# https://github.com/googleapis/repo-automation-bots/tree/main/packages/trusted-contribution
|
||||
# Install: https://github.com/apps/trusted-contributions-gcf
|
||||
|
||||
trustedContributors:
|
||||
- "dependabot[bot]"
|
||||
- "renovate-bot"
|
||||
annotations:
|
||||
# Trigger Cloud Build tests
|
||||
- type: comment
|
||||
text: "/gcbrun"
|
||||
- type: label
|
||||
text: "tests: run"
|
||||
4
.github/workflows/deploy_versioned_docs.yaml
vendored
4
.github/workflows/deploy_versioned_docs.yaml
vendored
@@ -35,7 +35,9 @@ jobs:
|
||||
ref: ${{ github.event.release.tag_name }}
|
||||
|
||||
- name: Get Version from Release Tag
|
||||
run: echo "VERSION=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
|
||||
run: echo "VERSION=${GITHUB_EVENT_RELEASE_TAG_NAME}" >> $GITHUB_ENV
|
||||
env:
|
||||
GITHUB_EVENT_RELEASE_TAG_NAME: ${{ github.event.release.tag_name }}
|
||||
|
||||
- name: Setup Hugo
|
||||
uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
|
||||
|
||||
2
.github/workflows/lint.yaml
vendored
2
.github/workflows/lint.yaml
vendored
@@ -51,7 +51,7 @@ jobs:
|
||||
console.log('Failed to remove label. Another job may have already removed it!');
|
||||
}
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
with:
|
||||
go-version: "1.25"
|
||||
- name: Checkout code
|
||||
|
||||
2
.github/workflows/tests.yaml
vendored
2
.github/workflows/tests.yaml
vendored
@@ -57,7 +57,7 @@ jobs:
|
||||
}
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
with:
|
||||
go-version: "1.24"
|
||||
|
||||
|
||||
@@ -51,10 +51,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
|
||||
# Add a new version block here before every release
|
||||
# The order of versions in this file is mirrored into the dropdown
|
||||
|
||||
[[params.versions]]
|
||||
version = "v0.22.0"
|
||||
url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"
|
||||
|
||||
[[params.versions]]
|
||||
version = "v0.21.0"
|
||||
url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"
|
||||
|
||||
30
CHANGELOG.md
30
CHANGELOG.md
@@ -1,35 +1,5 @@
|
||||
# Changelog
|
||||
|
||||
## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **tools/postgres:** Add allowed-origins flag ([#1984](https://github.com/googleapis/genai-toolbox/issues/1984)) ([862868f](https://github.com/googleapis/genai-toolbox/commit/862868f28476ea981575ce412faa7d6a03138f31))
|
||||
* **tools/postgres:** Add list-query-stats and get-column-cardinality functions ([#1976](https://github.com/googleapis/genai-toolbox/issues/1976)) ([9f76026](https://github.com/googleapis/genai-toolbox/commit/9f760269253a8cc92a357e995c6993ccc4a0fb7b))
|
||||
* **tools/spanner:** Add spanner list graphs to prebuiltconfigs ([#2056](https://github.com/googleapis/genai-toolbox/issues/2056)) ([0e7fbf4](https://github.com/googleapis/genai-toolbox/commit/0e7fbf465c488397aa9d8cab2e55165fff4eb53c))
|
||||
* **prebuilt/cloud-sql:** Add clone instance tool for cloud sql ([#1845](https://github.com/googleapis/genai-toolbox/issues/1845)) ([5e43630](https://github.com/googleapis/genai-toolbox/commit/5e43630907aa2d7bc6818142483a33272eab060b))
|
||||
* **serverless-spark:** Add create_pyspark_batch tool ([1bf0b51](https://github.com/googleapis/genai-toolbox/commit/1bf0b51f033c956790be1577bf5310d0b17e9c12))
|
||||
* **serverless-spark:** Add create_spark_batch tool ([17a9792](https://github.com/googleapis/genai-toolbox/commit/17a979207dbc4fe70acd0ebda164d1a8d34c1ed3))
|
||||
* Support alternate accessToken header name ([#1968](https://github.com/googleapis/genai-toolbox/issues/1968)) ([18017d6](https://github.com/googleapis/genai-toolbox/commit/18017d6545335a6fc1c472617101c35254d9a597))
|
||||
* Support for annotations ([#2007](https://github.com/googleapis/genai-toolbox/issues/2007)) ([ac21335](https://github.com/googleapis/genai-toolbox/commit/ac21335f4e88ca52d954d7f8143a551a35661b94))
|
||||
* **tool/mssql:** Set default host and port for MSSQL source ([#1943](https://github.com/googleapis/genai-toolbox/issues/1943)) ([7a9cc63](https://github.com/googleapis/genai-toolbox/commit/7a9cc633768d9ae9a7ff8230002da69d6a36ca86))
|
||||
* **tools/cloudsqlpg:** Add CloudSQL PostgreSQL pre-check tool ([#1722](https://github.com/googleapis/genai-toolbox/issues/1722)) ([8752e05](https://github.com/googleapis/genai-toolbox/commit/8752e05ab6e98812d95673a6f1ff67e9a6ae48d2))
|
||||
* **tools/postgres-list-publication-tables:** Add new postgres-list-publication-tables tool ([#1919](https://github.com/googleapis/genai-toolbox/issues/1919)) ([f4b1f0a](https://github.com/googleapis/genai-toolbox/commit/f4b1f0a68000ca2fc0325f55a1905705417c38a2))
|
||||
* **tools/postgres-list-tablespaces:** Add new postgres-list-tablespaces tool ([#1934](https://github.com/googleapis/genai-toolbox/issues/1934)) ([5ad7c61](https://github.com/googleapis/genai-toolbox/commit/5ad7c6127b3e47504fc4afda0b7f3de1dff78b8b))
|
||||
* **tools/spanner-list-graph:** Tool impl + docs + tests ([#1923](https://github.com/googleapis/genai-toolbox/issues/1923)) ([a0f44d3](https://github.com/googleapis/genai-toolbox/commit/a0f44d34ea3f044dd08501be616f70ddfd63ab45))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Add import for firebirdsql ([#2045](https://github.com/googleapis/genai-toolbox/issues/2045)) ([fb7aae9](https://github.com/googleapis/genai-toolbox/commit/fb7aae9d35b760d3471d8379642f835a0d84ec41))
|
||||
* Correct FAQ to mention HTTP tools ([#2036](https://github.com/googleapis/genai-toolbox/issues/2036)) ([7b44237](https://github.com/googleapis/genai-toolbox/commit/7b44237d4a21bfbf8d3cebe4d32a15affa29584d))
|
||||
* Format BigQuery numeric output as decimal strings ([#2084](https://github.com/googleapis/genai-toolbox/issues/2084)) ([155bff8](https://github.com/googleapis/genai-toolbox/commit/155bff80c1da4fae1e169e425fd82e1dc3373041))
|
||||
* Set default annotations for tools in code if annotation not provided in yaml ([#2049](https://github.com/googleapis/genai-toolbox/issues/2049)) ([565460c](https://github.com/googleapis/genai-toolbox/commit/565460c4ea8953dbe80070a8e469f957c0f7a70c))
|
||||
* **tools/alloydb-postgres-list-tables:** Exclude google_ml schema from list_tables ([#2046](https://github.com/googleapis/genai-toolbox/issues/2046)) ([a03984c](https://github.com/googleapis/genai-toolbox/commit/a03984cc15254c928f30085f8fa509ded6a79a0c))
|
||||
* **tools/alloydbcreateuser:** Remove duplication of project praram ([#2028](https://github.com/googleapis/genai-toolbox/issues/2028)) ([730ac6d](https://github.com/googleapis/genai-toolbox/commit/730ac6d22805fd50b4a675b74c1865f4e7689e7c))
|
||||
* **tools/mongodb:** Remove `required` tag from the `canonical` field ([#2099](https://github.com/googleapis/genai-toolbox/issues/2099)) ([744214e](https://github.com/googleapis/genai-toolbox/commit/744214e04cd12b11d166e6eb7da8ce4714904abc))
|
||||
|
||||
## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)
|
||||
|
||||
|
||||
|
||||
@@ -48,17 +48,6 @@ squashed when merged.
|
||||
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
|
||||
are inactive for more than 30 days may be closed.
|
||||
|
||||
### Automated Code Reviews
|
||||
|
||||
This repository uses **Gemini Code Assist** to provide automated code reviews on Pull Requests. While this does not replace human review, it provides immediate feedback on code quality and potential issues.
|
||||
|
||||
You can manually trigger the bot by commenting on your Pull Request:
|
||||
|
||||
* `/gemini`: Manually invokes Gemini Code Assist in comments
|
||||
* `/gemini review`: Posts a code review of the changes in the pull request
|
||||
* `/gemini summary`: Posts a summary of the changes in the pull request.
|
||||
* `/gemini help`: Overview of the available commands
|
||||
|
||||
## Adding a New Database Source or Tool
|
||||
|
||||
Please create an
|
||||
@@ -244,4 +233,4 @@ resources.
|
||||
* **PR Description:** PR description should **always** be included. It should
|
||||
include a concise description of the changes, it's impact, along with a
|
||||
summary of the solution. If the PR is related to a specific issue, the issue
|
||||
number should be mentioned in the PR description (e.g. `Fixes #1`).
|
||||
number should be mentioned in the PR description (e.g. `Fixes #1`).
|
||||
|
||||
42
README.md
42
README.md
@@ -125,7 +125,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.22.0
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -138,7 +138,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.22.0
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -151,7 +151,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.22.0
|
||||
> export VERSION=0.21.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -164,7 +164,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```powershell
|
||||
> :: see releases page for other versions
|
||||
> set VERSION=0.22.0
|
||||
> set VERSION=0.21.0
|
||||
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||
> ```
|
||||
>
|
||||
@@ -177,7 +177,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.22.0
|
||||
export VERSION=0.21.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -201,7 +201,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.22.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.21.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -515,36 +515,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>ADK</summary>
|
||||
|
||||
1. Install [Toolbox ADK SDK][toolbox-adk-js]:
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/adk
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```javascript
|
||||
import { ToolboxClient } from '@toolbox-sdk/adk';
|
||||
|
||||
// update the url to point to your server
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
let client = new ToolboxClient(URL);
|
||||
|
||||
// these tools can be passed to your application!
|
||||
const tools = await client.loadToolset('toolsetName');
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox ADK SDK, see the
|
||||
[project's README][toolbox-adk-js-readme].
|
||||
|
||||
[toolbox-adk-js]: https://www.npmjs.com/package/@toolbox-sdk/adk
|
||||
[toolbox-adk-js-readme]:
|
||||
https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-adk/README.md
|
||||
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
<details>
|
||||
|
||||
@@ -89,7 +89,6 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||
@@ -181,18 +180,14 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
|
||||
@@ -200,14 +195,11 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
||||
@@ -368,7 +360,6 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
|
||||
|
||||
// wrap RunE command so that we have access to original Command object
|
||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||
|
||||
@@ -63,9 +63,6 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
|
||||
if c.TelemetryServiceName == "" {
|
||||
c.TelemetryServiceName = "toolbox"
|
||||
}
|
||||
if c.AllowedOrigins == nil {
|
||||
c.AllowedOrigins = []string{"*"}
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
@@ -197,13 +194,6 @@ func TestServerConfigFlags(t *testing.T) {
|
||||
DisableReload: true,
|
||||
}),
|
||||
},
|
||||
{
|
||||
desc: "allowed origin",
|
||||
args: []string{"--allowed-origins", "http://foo.com,http://bar.com"},
|
||||
want: withDefaults(server.ServerConfig{
|
||||
AllowedOrigins: []string{"http://foo.com", "http://bar.com"},
|
||||
}),
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
@@ -1458,7 +1448,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_admin_tools",
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance"},
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1468,7 +1458,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mysql_admin_tools",
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1478,7 +1468,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_mssql_admin_tools",
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
|
||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1488,7 +1478,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "alloydb_postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1518,7 +1508,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "cloud_sql_postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1558,7 +1548,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"serverless_spark_tools": tools.ToolsetConfig{
|
||||
Name: "serverless_spark_tools",
|
||||
ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"},
|
||||
ToolNames: []string{"list_batches", "get_batch", "cancel_batch"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1618,7 +1608,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"postgres_database_tools": tools.ToolsetConfig{
|
||||
Name: "postgres_database_tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces"},
|
||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1628,7 +1618,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"spanner-database-tools": tools.ToolsetConfig{
|
||||
Name: "spanner-database-tools",
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables", "list_graphs"},
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.22.0
|
||||
0.21.0
|
||||
|
||||
@@ -8,25 +8,54 @@ An editor configured to use the AlloyDB MCP server can use its AI capabilities t
|
||||
|
||||
* **Provision & Manage Infrastructure**: Create and manage AlloyDB clusters, instances, and users
|
||||
|
||||
To connect to the database to explore and query data, search the MCP store for the AlloyDB for PostgreSQL MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **AlloyDB API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* AlloyDB Admin (`roles/alloydb.admin`) (for managing infrastructure)
|
||||
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL Admin", and click "Install".
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -53,20 +82,6 @@ The AlloyDB MCP server provides the following tools:
|
||||
| `list_users` | List users in a given project and location. |
|
||||
| `wait_for_operation` | Poll the operations API until the operation is done. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "alloydb-postgres-admin", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
|
||||
@@ -11,29 +11,66 @@ An editor configured to use the AlloyDB MCP server can use its AI capabilities t
|
||||
- **Monitor Performance** - View active queries, query plans, and other performance metrics (via observability tools)
|
||||
- **Manage Extensions** - List available and installed PostgreSQL extensions
|
||||
|
||||
For AlloyDB infrastructure management, search the MCP store for the AlloyDB for PostgreSQL Admin MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **AlloyDB API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* AlloyDB Client (`roles/alloydb.client`) (for connecting and querying)
|
||||
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
|
||||
|
||||
> **Note:** If your AlloyDB instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
### Configuration
|
||||
|
||||
## Install & Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* AlloyDB Project ID: The GCP project ID.
|
||||
* AlloyDB Region: The region of your AlloyDB instance.
|
||||
* AlloyDB Cluster ID: The ID of your AlloyDB cluster.
|
||||
* AlloyDB Instance ID: The ID of your AlloyDB instance.
|
||||
* AlloyDB Database Name: The name of the database.
|
||||
* AlloyDB Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
|
||||
* AlloyDB Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
|
||||
* AlloyDB IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
> If your AlloyDB instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -61,34 +98,6 @@ The AlloyDB MCP server provides the following tools:
|
||||
| `list_replication_slots` | List replication slots. |
|
||||
| `list_invalid_indexes` | List invalid indexes. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The AlloyDB MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export ALLOYDB_POSTGRES_PROJECT="<your-gcp-project-id>"
|
||||
export ALLOYDB_POSTGRES_REGION="<your-alloydb-region>"
|
||||
export ALLOYDB_POSTGRES_CLUSTER="<your-alloydb-cluster-id>"
|
||||
export ALLOYDB_POSTGRES_INSTANCE="<your-alloydb-instance-id>"
|
||||
export ALLOYDB_POSTGRES_DATABASE="<your-database-name>"
|
||||
export ALLOYDB_POSTGRES_USER="<your-database-user>" # Optional
|
||||
export ALLOYDB_POSTGRES_PASSWORD="<your-database-password>" # Optional
|
||||
export ALLOYDB_POSTGRES_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-postgres": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "alloydb-postgres", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [AlloyDB for PostgreSQL documentation](https://cloud.google.com/alloydb/docs).
|
||||
|
||||
@@ -10,25 +10,56 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
|
||||
- **Seamless Workflow:** Stay within your CLI, eliminating the need to constantly switch to the GCP console for generating analytical insights.
|
||||
- **Run Advanced Analytics:** Generate forecasts and perform contribution analysis using built-in advanced tools.
|
||||
|
||||
## Prerequisites
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **BigQuery API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* BigQuery User (`roles/bigquery.user`)
|
||||
|
||||
## Install & Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
### Configuration
|
||||
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "BigQuery", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* BigQuery Project ID: The GCP project ID.
|
||||
* BigQuery Location: (Optional) The location of your BigQuery dataset (e.g. "US", "EU").
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -60,29 +91,6 @@ The BigQuery MCP server provides the following tools:
|
||||
| `analyze_contribution` | Perform contribution analysis, also called key driver analysis. |
|
||||
| `search_catalog` | Search for tables based on the provided query. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The BigQuery MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export BIGQUERY_PROJECT="<your-gcp-project-id>"
|
||||
export BIGQUERY_LOCATION="<your-dataset-location>" # Optional
|
||||
export BIGQUERY_USE_CLIENT_OAUTH="true" # Optional
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"bigquery": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "bigquery", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [BigQuery documentation](https://cloud.google.com/bigquery/docs).
|
||||
|
||||
@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for SQL Server MCP server can use its
|
||||
|
||||
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
|
||||
|
||||
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for SQL Server MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
|
||||
* Cloud SQL Admin (`roles/cloudsql.admin`)
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for SQL Server Admin", and click "Install".
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -49,22 +78,6 @@ The Cloud SQL for SQL Server MCP server provides the following tools:
|
||||
| `list_users` | List users in a given project and location. |
|
||||
| `wait_for_operation` | Poll the operations API until the operation is done. |
|
||||
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-sqlserver-admin": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mssql-admin", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).
|
||||
|
||||
@@ -9,28 +9,65 @@ An editor configured to use the Cloud SQL for SQL Server MCP server can use its
|
||||
- **Query Data** - Execute SQL queries
|
||||
- **Explore Schema** - List tables and view schema details
|
||||
|
||||
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for SQL Server Admin MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Client (`roles/cloudsql.client`)
|
||||
|
||||
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
### Configuration
|
||||
|
||||
## Install & Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for SQL Server", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Cloud SQL Project ID: The GCP project ID.
|
||||
* Cloud SQL Region: The region of your Cloud SQL instance.
|
||||
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
|
||||
* Cloud SQL Database Name: The name of the database.
|
||||
* Cloud SQL Database User: The database username.
|
||||
* Cloud SQL Database Password: The password for the database user.
|
||||
* Cloud SQL IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -48,42 +85,6 @@ The Cloud SQL for SQL Server MCP server provides the following tools:
|
||||
| `execute_sql` | Use this tool to execute SQL. |
|
||||
| `list_tables` | Lists detailed schema information for user-created tables. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export CLOUD_SQL_MSSQL_PROJECT="<your-gcp-project-id>"
|
||||
export CLOUD_SQL_MSSQL_REGION="<your-cloud-sql-region>"
|
||||
export CLOUD_SQL_MSSQL_INSTANCE="<your-cloud-sql-instance-id>"
|
||||
export CLOUD_SQL_MSSQL_DATABASE="<your-database-name>"
|
||||
export CLOUD_SQL_MSSQL_USER="<your-database-user>" # Optional
|
||||
export CLOUD_SQL_MSSQL_PASSWORD="<your-database-password>" # Optional
|
||||
export CLOUD_SQL_MSSQL_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
|
||||
```
|
||||
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-mssql": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mssql", "--stdio"],
|
||||
"env": {
|
||||
"CLOUD_SQL_MSSQL_PROJECT": "your-project-id",
|
||||
"CLOUD_SQL_MSSQL_REGION": "your-region",
|
||||
"CLOUD_SQL_MSSQL_INSTANCE": "your-instance-id",
|
||||
"CLOUD_SQL_MSSQL_DATABASE": "your-database-name",
|
||||
"CLOUD_SQL_MSSQL_USER": "your-username",
|
||||
"CLOUD_SQL_MSSQL_PASSWORD": "your-password"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).
|
||||
|
||||
@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for MySQL MCP server can use its AI ca
|
||||
|
||||
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
|
||||
|
||||
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for MySQL MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
|
||||
* Cloud SQL Admin (`roles/cloudsql.admin`)
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for MySQL Admin", and click "Install".
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -49,21 +78,6 @@ The Cloud SQL for MySQL MCP server provides the following tools:
|
||||
| `list_users` | List users in a given project and location. |
|
||||
| `wait_for_operation` | Poll the operations API until the operation is done. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-mysql-admin": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mysql-admin", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).
|
||||
|
||||
@@ -11,28 +11,64 @@ An editor configured to use the Cloud SQL for MySQL MCP server can use its AI ca
|
||||
- **Database Maintenance** - Check for fragmentation and missing indexes
|
||||
- **Monitor Performance** - View active queries
|
||||
|
||||
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for MySQL Admin MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Client (`roles/cloudsql.client`)
|
||||
|
||||
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
### Configuration
|
||||
|
||||
## Install & Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for MySQL", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Cloud SQL Project ID: The GCP project ID.
|
||||
* Cloud SQL Region: The region of your Cloud SQL instance.
|
||||
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
|
||||
* Cloud SQL Database Name: The name of the database.
|
||||
* Cloud SQL Database User: The database username.
|
||||
* Cloud SQL Database Password: The password for the database user.
|
||||
* Cloud SQL IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -55,41 +91,6 @@ The Cloud SQL for MySQL MCP server provides the following tools:
|
||||
| `list_tables_missing_unique_indexes` | Find tables that do not have primary or unique key constraint. |
|
||||
| `list_table_fragmentation` | List table fragmentation in MySQL. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export CLOUD_SQL_MYSQL_PROJECT="<your-gcp-project-id>"
|
||||
export CLOUD_SQL_MYSQL_REGION="<your-cloud-sql-region>"
|
||||
export CLOUD_SQL_MYSQL_INSTANCE="<your-cloud-sql-instance-id>"
|
||||
export CLOUD_SQL_MYSQL_DATABASE="<your-database-name>"
|
||||
export CLOUD_SQL_MYSQL_USER="<your-database-user>" # Optional
|
||||
export CLOUD_SQL_MYSQL_PASSWORD="<your-database-password>" # Optional
|
||||
export CLOUD_SQL_MYSQL_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-mysql": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mysql", "--stdio"],
|
||||
"env": {
|
||||
"CLOUD_SQL_MYSQL_PROJECT": "your-project-id",
|
||||
"CLOUD_SQL_MYSQL_REGION": "your-region",
|
||||
"CLOUD_SQL_MYSQL_INSTANCE": "your-instance-id",
|
||||
"CLOUD_SQL_MYSQL_DATABASE": "your-database-name",
|
||||
"CLOUD_SQL_MYSQL_USER": "your-username",
|
||||
"CLOUD_SQL_MYSQL_PASSWORD": "your-password"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).
|
||||
|
||||
@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its
|
||||
|
||||
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
|
||||
|
||||
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for PostgreSQL MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
|
||||
* Cloud SQL Admin (`roles/cloudsql.admin`)
|
||||
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
|
||||
* Cloud SQL Admin (`roles/cloudsql.admin`)
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL Admin", and click "Install".
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -49,21 +78,6 @@ The Cloud SQL for PostgreSQL MCP server provides the following tools:
|
||||
| `list_users` | List users in a given project and location. |
|
||||
| `wait_for_operation` | Poll the operations API until the operation is done. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres-admin": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-postgres-admin", "--stdio"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).
|
||||
|
||||
@@ -11,28 +11,65 @@ An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its
|
||||
- **Monitor Performance** - View active queries, bloat, and memory configurations
|
||||
- **Manage Extensions** - List available and installed extensions
|
||||
|
||||
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for PostgreSQL Admin MCP Server.
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud SQL Client (`roles/cloudsql.client`)
|
||||
|
||||
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
### Configuration
|
||||
|
||||
## Install & Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Cloud SQL Project ID: The GCP project ID.
|
||||
* Cloud SQL Region: The region of your Cloud SQL instance.
|
||||
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
|
||||
* Cloud SQL Database Name: The name of the database.
|
||||
* Cloud SQL Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
|
||||
* Cloud SQL Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
|
||||
    *   Cloud SQL IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -66,42 +103,6 @@ The Cloud SQL for PostgreSQL MCP server provides the following tools:
|
||||
| `list_indexes` | Lists available user indexes in the database. |
|
||||
| `list_sequences` | Lists sequences in the database. |
|
||||
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export CLOUD_SQL_POSTGRES_PROJECT="<your-gcp-project-id>"
|
||||
export CLOUD_SQL_POSTGRES_REGION="<your-cloud-sql-region>"
|
||||
export CLOUD_SQL_POSTGRES_INSTANCE="<your-cloud-sql-instance-id>"
|
||||
export CLOUD_SQL_POSTGRES_DATABASE="<your-database-name>"
|
||||
export CLOUD_SQL_POSTGRES_USER="<your-database-user>" # Optional
|
||||
export CLOUD_SQL_POSTGRES_PASSWORD="<your-database-password>" # Optional
|
||||
export CLOUD_SQL_POSTGRES_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-postgres": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-postgres", "--stdio"],
|
||||
"env": {
|
||||
"CLOUD_SQL_POSTGRES_PROJECT": "your-project-id",
|
||||
"CLOUD_SQL_POSTGRES_REGION": "your-region",
|
||||
"CLOUD_SQL_POSTGRES_INSTANCE": "your-instance-id",
|
||||
"CLOUD_SQL_POSTGRES_DATABASE": "your-database-name",
|
||||
"CLOUD_SQL_POSTGRES_USER": "your-username",
|
||||
"CLOUD_SQL_POSTGRES_PASSWORD": "your-password"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).
|
||||
|
||||
@@ -9,24 +9,55 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
|
||||
- **Search Catalog** - Search for entries in Dataplex Catalog
|
||||
- **Explore Metadata** - Lookup specific entries and search aspect types
|
||||
|
||||
## Prerequisites
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Dataplex API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Dataplex Viewer (`roles/dataplex.viewer`) or equivalent permissions to read catalog entries.
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Dataplex", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Dataplex Project ID: The GCP project ID.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -45,30 +76,6 @@ The Dataplex MCP server provides the following tools:
|
||||
| `lookup_entry` | Retrieve a specific entry from Dataplex Catalog. |
|
||||
| `search_aspect_types` | Find aspect types relevant to the query. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export DATAPLEX_PROJECT="<your-gcp-project-id>"
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"dataplex": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "dataplex", "--stdio"],
|
||||
"env": {
|
||||
"DATAPLEX_PROJECT": "your-project-id"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Dataplex documentation](https://cloud.google.com/dataplex/docs).
|
||||
|
||||
@@ -11,23 +11,62 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
|
||||
- **Manage Dashboards** - Create, run, and modify dashboards
|
||||
- **Manage Looks** - Search for and run saved looks
|
||||
- **Health Checks** - Analyze instance health and performance
|
||||
- **Developer Tools** - Manage project files and toggle dev mode
|
||||
|
||||
## Prerequisites
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* Access to a Looker instance.
|
||||
* API Credentials (`Client ID` and `Client Secret`) or OAuth configuration.
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Looker", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Looker Base URL: The URL of your Looker instance.
|
||||
* Looker Client ID: The client ID for the Looker API.
|
||||
* Looker Client Secret: The client secret for the Looker API.
|
||||
* Looker Verify SSL: Whether to verify SSL certificates.
|
||||
* Looker Use Client OAuth: Whether to use OAuth for authentication.
|
||||
* Looker Show Hidden Models: Whether to show hidden models.
|
||||
* Looker Show Hidden Explores: Whether to show hidden explores.
|
||||
* Looker Show Hidden Fields: Whether to show hidden fields.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -55,38 +94,8 @@ The Looker MCP server provides a wide range of tools. Here are some of the key c
|
||||
| `dev_mode` | Toggles development mode. |
|
||||
| `get_projects` | Lists LookML projects. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export LOOKER_BASE_URL="<your-looker-instance-url>" # e.g. `https://looker.example.com`. You may need to add the port, i.e. `:19999`.
|
||||
export LOOKER_CLIENT_ID="<your-looker-client-id>"
|
||||
export LOOKER_CLIENT_SECRET="<your-looker-client-secret>"
|
||||
export LOOKER_VERIFY_SSL="true" # Optional, defaults to true
|
||||
export LOOKER_SHOW_HIDDEN_MODELS="true" # Optional, defaults to true
|
||||
export LOOKER_SHOW_HIDDEN_EXPLORES="true" # Optional, defaults to true
|
||||
export LOOKER_SHOW_HIDDEN_FIELDS="true" # Optional, defaults to true
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "looker", "--stdio"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://your.looker.instance.com",
|
||||
"LOOKER_CLIENT_ID": "your-client-id",
|
||||
"LOOKER_CLIENT_SECRET": "your-client-secret"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
*(See the full list of tools in the extension)*
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, visit the [Looker documentation](https://cloud.google.com/looker).
|
||||
For more information, visit the [Looker documentation](https://cloud.google.com/looker/docs).
|
||||
|
||||
@@ -9,25 +9,59 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
|
||||
- **Query Data** - Execute DML and DQL SQL queries
|
||||
- **Explore Schema** - List tables and view schema details
|
||||
|
||||
## Prerequisites
|
||||
## Installation and Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with the **Cloud Spanner API** enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* IAM Permissions:
|
||||
* Cloud Spanner Database User (`roles/spanner.databaseUser`) (for data access)
|
||||
* Cloud Spanner Viewer (`roles/spanner.viewer`) (for schema access)
|
||||
|
||||
## Install & Configuration
|
||||
### Configuration
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
|
||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||
|
||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "Spanner", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Spanner Project ID: The GCP project ID.
|
||||
* Spanner Instance ID: The Spanner instance ID.
|
||||
* Spanner Database ID: The Spanner database ID.
|
||||
* Spanner Dialect: (Optional) The database dialect, which can be "googlesql" or "postgresql". Defaults to "googlesql" if unspecified.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -41,42 +75,11 @@ Once configured, the MCP server will automatically provide Cloud Spanner capabil
|
||||
|
||||
The Cloud Spanner MCP server provides the following tools:
|
||||
|
||||
| Tool Name | Description |
|
||||
|:------------------|:-----------------------------------------------------------------|
|
||||
| `execute_sql` | Use this tool to execute DML SQL. |
|
||||
| `execute_sql_dql` | Use this tool to execute DQL SQL. |
|
||||
| `list_tables` | Lists detailed schema information for user-created tables. |
|
||||
| `list_graphs` | Lists detailed graph schema information for user-created graphs. |
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
The MCP server is configured using environment variables.
|
||||
|
||||
```bash
|
||||
export SPANNER_PROJECT="<your-gcp-project-id>"
|
||||
export SPANNER_INSTANCE="<your-spanner-instance-id>"
|
||||
export SPANNER_DATABASE="<your-spanner-database-id>"
|
||||
export SPANNER_DIALECT="googlesql" # Optional: "googlesql" or "postgresql". Defaults to "googlesql".
|
||||
```
|
||||
|
||||
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"spanner": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "spanner", "--stdio"],
|
||||
"env": {
|
||||
"SPANNER_PROJECT": "your-project-id",
|
||||
"SPANNER_INSTANCE": "your-instance-id",
|
||||
"SPANNER_DATABASE": "your-database-name",
|
||||
"SPANNER_DIALECT": "googlesql"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
| Tool Name | Description |
|
||||
|:------------------|:-----------------------------------------------------------|
|
||||
| `execute_sql` | Use this tool to execute DML SQL. |
|
||||
| `execute_sql_dql` | Use this tool to execute DQL SQL. |
|
||||
| `list_tables` | Lists detailed schema information for user-created tables. |
|
||||
|
||||
## Documentation
|
||||
|
||||
|
||||
@@ -2,43 +2,59 @@
|
||||
|
||||
The MCP Toolbox for Databases Server gives AI-powered development tools the ability to work with your custom tools. It is designed to simplify and secure the development of tools for interacting with databases.
|
||||
|
||||
## Installation and Setup
|
||||
|
||||
## Prerequisites
|
||||
### Prerequisites
|
||||
|
||||
* [Node.js](https://nodejs.org/) installed.
|
||||
* A Google Cloud project with relevant APIs enabled.
|
||||
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
|
||||
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
|
||||
1. **Download the Toolbox binary**:
|
||||
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
|
||||
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
|
||||
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
|
||||
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
2. **Make it executable**:
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
## Install & Configuration
|
||||
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
|
||||
```bash
|
||||
sudo mv toolbox /usr/local/bin/
|
||||
# sudo mv toolbox /usr/bin/
|
||||
```
|
||||
|
||||
1. In the Antigravity MCP Store, click the "Install" button.
|
||||
**On Windows, move binary to the `WindowsApps\` folder**:
|
||||
```
|
||||
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
|
||||
```
|
||||
|
||||
**Tip:** Ensure the destination folder for your binary is included in
|
||||
your system's PATH environment variable. To check `PATH`, use `echo
|
||||
$PATH` (or `echo %PATH%` on Windows).
|
||||
|
||||
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
|
||||
**Note:** You may need to restart Antigravity for changes to take effect.
|
||||
|
||||
3. Click "View raw config" and update the `tools.yaml` path with the full absolute path to your file.
|
||||
* Any required APIs and permissions for connecting to your database.
|
||||
|
||||
> [!NOTE]
|
||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||
### Configuration
|
||||
|
||||
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
|
||||
2. **Browse and Install**: Search for "MCP Toolbox for Databases", and click "Install".
|
||||
3. **Configuration**: The following configuration is needed for the server:
|
||||
* Add your [`tools.yaml` configuration
|
||||
file](https://googleapis.github.io/genai-toolbox/getting-started/configure/)
|
||||
to the directory you are running Antigravity
|
||||
|
||||
## Usage
|
||||
|
||||
Interact with your custom tools using natural language.
|
||||
|
||||
## Custom MCP Server Configuration
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mcp-toolbox": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--tools-file", "your-tool-file.yaml"],
|
||||
"env": {
|
||||
"ENV_VAR_NAME": "ENV_VAR_VALUE",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
|
||||
@@ -44,11 +44,15 @@ most popular issues, so make sure to +1 ones you are the most interested in.
|
||||
|
||||
## Can Toolbox be used for non-database tools?
|
||||
|
||||
**Yes!** While Toolbox is primarily focused on databases, it also supports generic
|
||||
**HTTP tools** (`kind: http`). These allow you to connect your agents to REST APIs
|
||||
and other web services, enabling workflows that extend beyond database interactions.
|
||||
Currently, Toolbox is primarily focused on making it easier to create and
|
||||
develop tools focused on interacting with Databases. We believe that there are a
|
||||
lot of unique problems when interacting with Databases for Gen AI use cases, and
|
||||
want to prioritize solving those first.
|
||||
|
||||
For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md).
|
||||
However, we've also received feedback that supporting more generic HTTP or
|
||||
GRPC tools might be helpful in assisting with migrating to Toolbox or in
|
||||
accomplishing more complicated workflows. We're looking into what that might
|
||||
best look like in Toolbox.
|
||||
|
||||
## Can I use _$BAR_ orchestration framework to use tools from Toolbox?
|
||||
|
||||
|
||||
@@ -234,7 +234,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.22.0\" # x-release-please-version\n",
|
||||
"version = \"0.21.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -87,7 +87,7 @@ To install Toolbox as a binary on Linux (AMD64):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.22.0
|
||||
export VERSION=0.21.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -98,7 +98,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.22.0
|
||||
export VERSION=0.21.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -109,7 +109,7 @@ To install Toolbox as a binary on macOS (Intel):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.22.0
|
||||
export VERSION=0.21.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -120,7 +120,7 @@ To install Toolbox as a binary on Windows (AMD64):
|
||||
|
||||
```powershell
|
||||
:: see releases page for other versions
|
||||
set VERSION=0.22.0
|
||||
set VERSION=0.21.0
|
||||
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||
```
|
||||
|
||||
@@ -132,7 +132,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.22.0
|
||||
export VERSION=0.21.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -151,7 +151,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.22.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.21.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
@@ -294,10 +294,6 @@ let client = new ToolboxClient(URL);
|
||||
const toolboxTools = await client.loadToolset('toolsetName');
|
||||
{{< /highlight >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's
|
||||
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LangChain/Langraph" lang="en" %}}
|
||||
|
||||
@@ -322,10 +318,6 @@ const getTool = (toolboxTool) => tool(currTool, {
|
||||
const tools = toolboxTools.map(getTool);
|
||||
{{< /highlight >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's
|
||||
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Genkit" lang="en" %}}
|
||||
|
||||
@@ -361,10 +353,6 @@ const getTool = (toolboxTool) => ai.defineTool({
|
||||
const tools = toolboxTools.map(getTool);
|
||||
{{< /highlight >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's
|
||||
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LlamaIndex" lang="en" %}}
|
||||
|
||||
@@ -392,32 +380,12 @@ const tools = toolboxTools.map(getTool);
|
||||
|
||||
{{< /highlight >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's
|
||||
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="ADK TS" lang="en" %}}
|
||||
|
||||
{{< highlight javascript >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/adk';
|
||||
|
||||
// Replace with the actual URL where your Toolbox service is running
|
||||
const URL = 'http://127.0.0.1:5000';
|
||||
|
||||
let client = new ToolboxClient(URL);
|
||||
const tools = await client.loadToolset();
|
||||
|
||||
// Use the client and tools as per requirement
|
||||
|
||||
{{< /highlight >}}
|
||||
|
||||
For detailed samples on using the Toolbox JS SDK with ADK JS, see the [project's
|
||||
README.](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main/packages/toolbox-adk/README.md)
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's
|
||||
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
#### Go
|
||||
|
||||
|
||||
@@ -101,62 +101,44 @@ pip install google-genai
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Create the agent:
|
||||
{{< tabpane persist=header >}}
|
||||
{{% tab header="ADK" text=true %}}
|
||||
1. Create a new file named `hotel_agent.py` and copy the following
|
||||
code to create an agent:
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="ADK" lang="python" >}}
|
||||
|
||||
1. Create a new agent project. This will create a new directory named `my_agent`
|
||||
with a file `agent.py`.
|
||||
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||
|
||||
```bash
|
||||
adk create my_agent
|
||||
```
|
||||
<br/>
|
||||
{{< /tab >}}
|
||||
{{< tab header="LangChain" lang="python" >}}
|
||||
|
||||
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
|
||||
```py
|
||||
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||
```
|
||||
<br/>
|
||||
|
||||
1. Create a `.env` file with your Google API key:
|
||||
```bash
|
||||
echo 'GOOGLE_API_KEY="YOUR_API_KEY"' > my_agent/.env
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="LangChain" text=true %}}
|
||||
Create a new file named `agent.py` and copy the following code:
|
||||
```py
|
||||
{{< include "quickstart/python/langchain/quickstart.py" >}}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="LlamaIndex" text=true %}}
|
||||
Create a new file named `agent.py` and copy the following code:
|
||||
```py
|
||||
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="python" >}}
|
||||
|
||||
{{< include "quickstart/python/llamaindex/quickstart.py" >}}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Core" text=true %}}
|
||||
Create a new file named `agent.py` and copy the following code:
|
||||
```py
|
||||
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="python" >}}
|
||||
|
||||
{{< include "quickstart/python/core/quickstart.py" >}}
|
||||
```
|
||||
{{% /tab %}}
|
||||
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="ADK" lang="en" %}}
|
||||
To learn more about Agent Development Kit, check out the [ADK
|
||||
Documentation](https://google.github.io/adk-docs/get-started/python/).
|
||||
documentation.](https://google.github.io/adk-docs/)
|
||||
{{% /tab %}}
|
||||
{{% tab header="Langchain" lang="en" %}}
|
||||
To learn more about Agents in LangChain, check out the [LangGraph Agent
|
||||
Documentation](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent).
|
||||
documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
|
||||
{{% /tab %}}
|
||||
{{% tab header="LlamaIndex" lang="en" %}}
|
||||
To learn more about Agents in LlamaIndex, check out the [LlamaIndex
|
||||
AgentWorkflow
|
||||
Documentation](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/).
|
||||
documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
|
||||
{{% /tab %}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
To learn more about tool calling with Google GenAI, check out the
|
||||
@@ -165,37 +147,11 @@ Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#man
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
4. Run your agent, and observe the results:
|
||||
1. Run your agent, and observe the results:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{% tab header="ADK" text=true %}}
|
||||
Run your agent locally for testing:
|
||||
```sh
|
||||
adk run my_agent
|
||||
```
|
||||
<br/>
|
||||
|
||||
Alternatively, serve it via a web interface:
|
||||
```sh
|
||||
adk web --port 8000
|
||||
```
|
||||
<br/>
|
||||
|
||||
For more information, refer to the ADK documentation on [Running
|
||||
Agents](https://google.github.io/adk-docs/get-started/python/#run-your-agent)
|
||||
and [Deploying to Cloud](https://google.github.io/adk-docs/deploy/).
|
||||
|
||||
{{% /tab %}}
|
||||
{{< tab header="Langchain" lang="bash" >}}
|
||||
python agent.py
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="bash" >}}
|
||||
python agent.py
|
||||
{{< /tab >}}
|
||||
{{< tab header="Core" lang="bash" >}}
|
||||
python agent.py
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
```sh
|
||||
python hotel_agent.py
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [Python SDK
|
||||
|
||||
@@ -40,24 +40,11 @@ from Toolbox.
|
||||
```
|
||||
|
||||
1. In a new terminal, install the
|
||||
SDK package.
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="bash" >}}
|
||||
npm install @toolbox-sdk/core
|
||||
{{< /tab >}}
|
||||
{{< tab header="GenkitJS" lang="bash" >}}
|
||||
npm install @toolbox-sdk/core
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="bash" >}}
|
||||
npm install @toolbox-sdk/core
|
||||
{{< /tab >}}
|
||||
{{< tab header="GoogleGenAI" lang="bash" >}}
|
||||
npm install @toolbox-sdk/core
|
||||
{{< /tab >}}
|
||||
{{< tab header="ADK" lang="bash" >}}
|
||||
npm install @toolbox-sdk/adk
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
[SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
|
||||
|
||||
```bash
|
||||
npm install @toolbox-sdk/core
|
||||
```
|
||||
|
||||
1. Install other required dependencies
|
||||
|
||||
@@ -74,9 +61,6 @@ npm install llamaindex @llamaindex/google @llamaindex/workflow
|
||||
{{< tab header="GoogleGenAI" lang="bash" >}}
|
||||
npm install @google/genai
|
||||
{{< /tab >}}
|
||||
{{< tab header="ADK" lang="bash" >}}
|
||||
npm install @google/adk
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Create a new file named `hotelAgent.js` and copy the following code to create
|
||||
@@ -107,12 +91,6 @@ npm install @google/adk
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="ADK" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/adk/quickstart.js" >}}
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Run your agent, and observe the results:
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -25,11 +25,11 @@ require (
|
||||
go.opentelemetry.io/otel v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.38.0 // indirect
|
||||
golang.org/x/crypto v0.45.0 // indirect
|
||||
golang.org/x/net v0.47.0 // indirect
|
||||
golang.org/x/crypto v0.43.0 // indirect
|
||||
golang.org/x/net v0.46.0 // indirect
|
||||
golang.org/x/oauth2 v0.32.0 // indirect
|
||||
golang.org/x/sys v0.38.0 // indirect
|
||||
golang.org/x/text v0.31.0 // indirect
|
||||
golang.org/x/sys v0.37.0 // indirect
|
||||
golang.org/x/text v0.30.0 // indirect
|
||||
google.golang.org/api v0.255.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
|
||||
google.golang.org/grpc v1.76.0 // indirect
|
||||
|
||||
@@ -84,18 +84,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
|
||||
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
|
||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
|
||||
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
|
||||
2590
docs/en/getting-started/quickstart/js/adk/package-lock.json
generated
2590
docs/en/getting-started/quickstart/js/adk/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "adk",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "quickstart.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"test": "node --test"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@google/adk": "^0.1.3",
|
||||
"@toolbox-sdk/adk": "^0.1.5"
|
||||
}
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
import { InMemoryRunner, LlmAgent, LogLevel } from '@google/adk';
|
||||
import { ToolboxClient } from '@toolbox-sdk/adk';
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
process.env.GOOGLE_GENAI_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
export async function main() {
|
||||
const userId = 'test_user';
|
||||
const client = new ToolboxClient('http://127.0.0.1:5000');
|
||||
const tools = await client.loadToolset("my-toolset");
|
||||
|
||||
const rootAgent = new LlmAgent({
|
||||
name: 'hotel_agent',
|
||||
model: 'gemini-2.5-flash',
|
||||
description: 'Agent for hotel bookings and administration.',
|
||||
instruction: prompt,
|
||||
tools: tools,
|
||||
});
|
||||
|
||||
const appName = rootAgent.name;
|
||||
const runner = new InMemoryRunner({ agent: rootAgent, appName, logLevel: LogLevel.ERROR, });
|
||||
const session = await runner.sessionService.createSession({ appName, userId });
|
||||
|
||||
for (const query of queries) {
|
||||
await runPrompt(runner, userId, session.id, query);
|
||||
}
|
||||
}
|
||||
|
||||
async function runPrompt(runner, userId, sessionId, prompt) {
|
||||
const content = { role: 'user', parts: [{ text: prompt }] };
|
||||
const stream = runner.runAsync({ userId, sessionId, newMessage: content });
|
||||
const responses = await Array.fromAsync(stream);
|
||||
const accumulatedResponse = responses
|
||||
.flatMap((e) => e.content?.parts?.map((p) => p.text) ?? [])
|
||||
.join('');
|
||||
|
||||
console.log(`\nMODEL RESPONSE: ${accumulatedResponse}\n`);
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -9,7 +9,7 @@
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@langchain/google-genai": "^2.0.0",
|
||||
"@langchain/google-genai": "^1.0.0",
|
||||
"@langchain/langgraph": "^1.0.0",
|
||||
"@toolbox-sdk/core": "^0.1.2",
|
||||
"langchain": "^1.0.0"
|
||||
@@ -18,7 +18,8 @@
|
||||
"node_modules/@cfworker/json-schema": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@google/generative-ai": {
|
||||
"version": "0.24.1",
|
||||
@@ -45,10 +46,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/core": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.0.tgz",
|
||||
"integrity": "sha512-yJ6JHcU9psjnQbzRFkXjIdNTA+3074dA+2pHdH8ewvQCSleSk6JcjkCMIb5+NASjeMoi1ZuntlLKVsNqF38YxA==",
|
||||
"license": "MIT",
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.0.2.tgz",
|
||||
"integrity": "sha512-6mOn4bZyO6XT0GGrEijRtMVrmYJGZ8y1BcwyTPDptFz38lP0CEzrKEYB++h+u3TEcAd3eO25l1aGw/zVlVgw2Q==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@cfworker/json-schema": "^4.0.2",
|
||||
@@ -59,7 +59,7 @@
|
||||
"langsmith": "^0.3.64",
|
||||
"mustache": "^4.2.0",
|
||||
"p-queue": "^6.6.2",
|
||||
"p-retry": "^7.0.0",
|
||||
"p-retry": "4",
|
||||
"uuid": "^10.0.0",
|
||||
"zod": "^3.25.76 || ^4"
|
||||
},
|
||||
@@ -67,26 +67,10 @@
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/core/node_modules/p-retry": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.0.tgz",
|
||||
"integrity": "sha512-xL4PiFRQa/f9L9ZvR4/gUCRNus4N8YX80ku8kv9Jqz+ZokkiZLM0bcvX0gm1F3PDi9SPRsww1BDsTWgE6Y1GLQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-network-error": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/google-genai": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.0.0.tgz",
|
||||
"integrity": "sha512-PaAWkogQdF+Y2bhhXWXUrC2nO7sTgWLtobBbZl/0V8Aa1F/KG2wrMECie3S17bAdFu/6VmQOuFFrlgSMwQC5KA==",
|
||||
"license": "MIT",
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-1.0.0.tgz",
|
||||
"integrity": "sha512-ICUBZl/46nG6+Yhe5v7kp/2TQBGOzqEkpfKPLDeNyJ4x9OOL46xsW3ZZrHJjhGMQuR6/JMmQMTU9kLoYgsd1Tg==",
|
||||
"dependencies": {
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
"uuid": "^11.1.0"
|
||||
@@ -95,7 +79,7 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@langchain/core": "1.1.0"
|
||||
"@langchain/core": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/google-genai/node_modules/uuid": {
|
||||
@@ -106,7 +90,6 @@
|
||||
"https://github.com/sponsors/broofa",
|
||||
"https://github.com/sponsors/ctavan"
|
||||
],
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"uuid": "dist/esm/bin/uuid"
|
||||
}
|
||||
@@ -241,6 +224,7 @@
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
|
||||
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -324,6 +308,7 @@
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
||||
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -436,6 +421,7 @@
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@@ -814,18 +800,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-network-error": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.0.tgz",
|
||||
"integrity": "sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
@@ -849,6 +823,7 @@
|
||||
"version": "1.0.21",
|
||||
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
|
||||
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"base64-js": "^1.5.1"
|
||||
}
|
||||
@@ -995,6 +970,7 @@
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
||||
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mustache": "bin/mustache"
|
||||
}
|
||||
@@ -1433,7 +1409,6 @@
|
||||
"version": "3.25.76",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@langchain/google-genai": "^2.0.0",
|
||||
"@langchain/google-genai": "^1.0.0",
|
||||
"@langchain/langgraph": "^1.0.0",
|
||||
"@toolbox-sdk/core": "^0.1.2",
|
||||
"langchain": "^1.0.0"
|
||||
|
||||
@@ -1,15 +1,71 @@
|
||||
from google.adk import Agent
|
||||
from google.adk.apps import App
|
||||
from google.adk.agents import Agent
|
||||
from google.adk.runners import Runner
|
||||
from google.adk.sessions import InMemorySessionService
|
||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||
from google.genai import types
|
||||
from toolbox_core import ToolboxSyncClient
|
||||
|
||||
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
||||
client = ToolboxSyncClient("http://127.0.0.1:5000")
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
root_agent = Agent(
|
||||
name='root_agent',
|
||||
model='gemini-2.5-flash',
|
||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||
tools=client.load_toolset(),
|
||||
)
|
||||
# TODO(developer): replace this with your Google API key
|
||||
|
||||
app = App(root_agent=root_agent, name="my_agent")
|
||||
api_key = os.environ.get("GOOGLE_API_KEY") or "your-api-key" # Set your API key here
|
||||
os.environ["GOOGLE_API_KEY"] = api_key
|
||||
|
||||
async def main():
|
||||
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
root_agent = Agent(
|
||||
model='gemini-2.0-flash-001',
|
||||
name='hotel_agent',
|
||||
description='A helpful AI assistant.',
|
||||
instruction=prompt,
|
||||
tools=toolbox_client.load_toolset("my-toolset"),
|
||||
)
|
||||
|
||||
session_service = InMemorySessionService()
|
||||
artifacts_service = InMemoryArtifactService()
|
||||
session = await session_service.create_session(
|
||||
state={}, app_name='hotel_agent', user_id='123'
|
||||
)
|
||||
runner = Runner(
|
||||
app_name='hotel_agent',
|
||||
agent=root_agent,
|
||||
artifact_service=artifacts_service,
|
||||
session_service=session_service,
|
||||
)
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
for query in queries:
|
||||
content = types.Content(role='user', parts=[types.Part(text=query)])
|
||||
events = runner.run(session_id=session.id,
|
||||
user_id='123', new_message=content)
|
||||
|
||||
responses = (
|
||||
part.text
|
||||
for event in events
|
||||
for part in event.content.parts
|
||||
if part.text is not None
|
||||
)
|
||||
|
||||
for text in responses:
|
||||
print(text)
|
||||
|
||||
asyncio.run(main())
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
langchain==1.1.0
|
||||
langchain-google-vertexai==3.1.0
|
||||
langgraph==1.0.4
|
||||
langchain==1.0.8
|
||||
langchain-google-vertexai==3.0.3
|
||||
langgraph==1.0.3
|
||||
toolbox-langchain==0.5.3
|
||||
pytest==9.0.1
|
||||
|
||||
15
docs/en/getting-started/quickstart/python/quickstart_test.py
Executable file → Normal file
15
docs/en/getting-started/quickstart/python/quickstart_test.py
Executable file → Normal file
@@ -44,28 +44,15 @@ class TestExecution:
|
||||
@pytest.fixture(scope="function")
|
||||
def script_output(self, capsys):
|
||||
"""Run the quickstart function and return its output."""
|
||||
|
||||
# TODO: Add better validation for ADK once we have a way to capture its
|
||||
# output.
|
||||
if ORCH_NAME == "adk":
|
||||
return quickstart.app.root_agent.name
|
||||
else:
|
||||
asyncio.run(quickstart.main())
|
||||
|
||||
asyncio.run(quickstart.main())
|
||||
return capsys.readouterr()
|
||||
|
||||
def test_script_runs_without_errors(self, script_output):
|
||||
"""Test that the script runs and produces no stderr."""
|
||||
if ORCH_NAME == "adk":
|
||||
return
|
||||
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
||||
|
||||
def test_keywords_in_output(self, script_output, golden_keywords):
|
||||
"""Test that expected keywords are present in the script's output."""
|
||||
|
||||
if ORCH_NAME == "adk":
|
||||
assert script_output == "root_agent"
|
||||
return
|
||||
output = script_output.out
|
||||
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
||||
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
||||
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -52,7 +52,6 @@ instance, database and users:
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
@@ -298,7 +297,6 @@ instances and interacting with your database:
|
||||
* **list_databases**: Lists all databases for a Cloud SQL instance.
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -52,7 +52,6 @@ database and users:
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
@@ -298,7 +297,6 @@ instances and interacting with your database:
|
||||
* **list_databases**: Lists all databases for a Cloud SQL instance.
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -52,7 +52,6 @@ instance, database and users:
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
@@ -298,7 +297,6 @@ instances and interacting with your database:
|
||||
* **list_databases**: Lists all databases for a Cloud SQL instance.
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -49,19 +49,19 @@ to expose your developer assistant tools to a Looker instance:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -45,19 +45,19 @@ instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -1,147 +0,0 @@
|
||||
---
|
||||
title: "Deploy ADK Agent and MCP Toolbox"
|
||||
type: docs
|
||||
weight: 4
|
||||
description: >
|
||||
How to deploy your ADK Agent to Vertex AI Agent Engine and connect it to an MCP Toolbox deployed on Cloud Run.
|
||||
---
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Completed the [Python Quickstart
|
||||
(Local)](../getting-started/local_quickstart.md) and have a working ADK
|
||||
agent running locally.
|
||||
2. Installed the [Google Cloud CLI](https://cloud.google.com/sdk/docs/install).
|
||||
3. A Google Cloud project with billing enabled.
|
||||
|
||||
## Step 1: Deploy MCP Toolbox to Cloud Run
|
||||
|
||||
Before deploying your agent, your MCP Toolbox server needs to be accessible from
|
||||
the cloud. We will deploy MCP Toolbox to Cloud Run.
|
||||
|
||||
Follow the [Deploy to Cloud Run](deploy_toolbox.md) guide to deploy your MCP
|
||||
Toolbox instance.
|
||||
|
||||
{{% alert title="Important" %}}
|
||||
After deployment, note down the Service URL of your MCP Toolbox Cloud Run
|
||||
service. You will need this to configure your agent.
|
||||
{{% /alert %}}
|
||||
## Step 2: Prepare your Agent for Deployment
|
||||
|
||||
We will use the `agent-starter-pack` tool to enhance your local agent project
|
||||
with the necessary configuration for deployment to Vertex AI Agent Engine.
|
||||
|
||||
1. Open a terminal and navigate to the **parent directory** of your agent
|
||||
project (the directory containing the `my_agent` folder).
|
||||
|
||||
2. Run the following command to enhance your project:
|
||||
|
||||
```bash
|
||||
uvx agent-starter-pack enhance --adk -d agent_engine
|
||||
```
|
||||
|
||||
3. Follow the interactive prompts to configure your deployment settings. This
|
||||
process will generate deployment configuration files (like a `Makefile` and
|
||||
`Dockerfile`) in your project directory.
|
||||
|
||||
4. Add `toolbox-core` as a dependency to the new project:
|
||||
|
||||
```bash
|
||||
uv add toolbox-core
|
||||
```
|
||||
|
||||
## Step 3: Configure Google Cloud Authentication
|
||||
|
||||
Ensure your local environment is authenticated with Google Cloud to perform the
|
||||
deployment.
|
||||
|
||||
1. Login with Application Default Credentials (ADC):
|
||||
|
||||
```bash
|
||||
gcloud auth application-default login
|
||||
```
|
||||
|
||||
2. Set your active project:
|
||||
|
||||
```bash
|
||||
gcloud config set project <YOUR_PROJECT_ID>
|
||||
```
|
||||
|
||||
## Step 4: Connect Agent to Deployed MCP Toolbox
|
||||
|
||||
You need to update your agent's code to connect to the Cloud Run URL of your MCP
|
||||
Toolbox instead of the local address.
|
||||
|
||||
1. Recall that you can find the Cloud Run deployment URL of the MCP Toolbox
|
||||
server using the following command:
|
||||
|
||||
```bash
|
||||
gcloud run services describe toolbox --format 'value(status.url)'
|
||||
```
|
||||
|
||||
2. Open your agent file (`my_agent/agent.py`).
|
||||
|
||||
3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
|
||||
|
||||
{{% alert color="info" %}}
|
||||
Since Cloud Run services are secured by default, you also need to provide an
|
||||
authentication token.
|
||||
{{% /alert %}}
|
||||
|
||||
Replace your existing client initialization code with the following:
|
||||
|
||||
```python
|
||||
from google.adk import Agent
|
||||
from google.adk.apps import App
|
||||
from toolbox_core import ToolboxSyncClient, auth_methods
|
||||
|
||||
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
||||
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
||||
|
||||
# Initialize the client with the Cloud Run URL and Auth headers
|
||||
client = ToolboxSyncClient(
|
||||
TOOLBOX_URL,
|
||||
client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
|
||||
)
|
||||
|
||||
root_agent = Agent(
|
||||
name='root_agent',
|
||||
model='gemini-2.5-flash',
|
||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||
tools=client.load_toolset(),
|
||||
)
|
||||
|
||||
app = App(root_agent=root_agent, name="my_agent")
|
||||
```
|
||||
|
||||
{{% alert title="Important" %}}
|
||||
Ensure that the `name` parameter in the `App` initialization matches the name of
|
||||
your agent's parent directory (e.g., `my_agent`).
|
||||
```python
|
||||
...
|
||||
|
||||
app = App(root_agent=root_agent, name="my_agent")
|
||||
```
|
||||
{{% /alert %}}
|
||||
|
||||
## Step 5: Deploy to Agent Engine
|
||||
|
||||
Run the deployment command:
|
||||
|
||||
```bash
|
||||
make backend
|
||||
```
|
||||
|
||||
This command will build your agent's container image and deploy it to Vertex AI.
|
||||
|
||||
## Step 6: Test your Deployment
|
||||
|
||||
Once the deployment command (`make backend`) completes, it will output the URL
|
||||
for the Agent Engine Playground. You can click on this URL to open the
|
||||
Playground in your browser and start chatting with your agent to test the tools.
|
||||
|
||||
For additional test scenarios, refer to the [Test deployed
|
||||
agent](https://google.github.io/adk-docs/deploy/agent-engine/#test-deployment)
|
||||
section in the ADK documentation.
|
||||
@@ -67,13 +67,6 @@ networks:
|
||||
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `command: [ "toolbox",
|
||||
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
|
||||
"--allowed-origins", "https://foo.bar"]`
|
||||
{{< /notice >}}
|
||||
|
||||
1. Run the following command to bring up the Toolbox and Postgres instance
|
||||
|
||||
```bash
|
||||
|
||||
@@ -188,12 +188,6 @@ description: >
|
||||
path: tools.yaml
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `args: ["--address",
|
||||
"0.0.0.0", "--allowed-origins", "https://foo.bar"]`
|
||||
{{< /notice >}}
|
||||
|
||||
1. Create the deployment.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -104,7 +104,7 @@ section.
|
||||
export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
|
||||
```
|
||||
|
||||
{{< notice note >}}
|
||||
{{< notice note >}}
|
||||
**The `$PORT` Environment Variable**
|
||||
Google Cloud Run dictates the port your application must listen on by setting
|
||||
the `$PORT` environment variable inside your container. This value defaults to
|
||||
@@ -140,45 +140,6 @@ deployment will time out.
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
```
|
||||
|
||||
### Update deployed server to be secure
|
||||
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. In order to do that, you will
|
||||
have to re-deploy the cloud run service with the new flag.
|
||||
|
||||
1. Set an environment variable to the cloud run url:
|
||||
|
||||
```bash
|
||||
export URL=<cloud run url>
|
||||
```
|
||||
|
||||
2. Redeploy Toolbox:
|
||||
|
||||
```bash
|
||||
gcloud run deploy toolbox \
|
||||
--image $IMAGE \
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL"
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
```
|
||||
|
||||
If you are using a VPC network, use the command below:
|
||||
|
||||
```bash
|
||||
gcloud run deploy toolbox \
|
||||
--image $IMAGE \
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL" \
|
||||
# TODO(dev): update the following to match your VPC if necessary
|
||||
--network default \
|
||||
--subnet default
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
```
|
||||
|
||||
## Connecting with Toolbox Client SDK
|
||||
|
||||
You can connect to Toolbox Cloud Run instances directly through the SDK.
|
||||
@@ -201,23 +162,18 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="Python" lang="python" >}}
|
||||
import asyncio
|
||||
from toolbox_core import ToolboxClient, auth_methods
|
||||
|
||||
# Replace with the Cloud Run service URL generated in the previous step
|
||||
|
||||
URL = "https://cloud-run-url.app"
|
||||
|
||||
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
|
||||
|
||||
async def main():
|
||||
async with ToolboxClient(
|
||||
URL,
|
||||
client_headers={"Authorization": auth_token_provider},
|
||||
) as toolbox:
|
||||
toolset = await toolbox.load_toolset()
|
||||
# ...
|
||||
|
||||
asyncio.run(main())
|
||||
async with ToolboxClient(
|
||||
URL,
|
||||
client_headers={"Authorization": auth_token_provider},
|
||||
) as toolbox:
|
||||
{{< /tab >}}
|
||||
{{< tab header="Javascript" lang="javascript" >}}
|
||||
import { ToolboxClient } from '@toolbox-sdk/core';
|
||||
|
||||
@@ -25,7 +25,6 @@ description: >
|
||||
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
|
||||
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
|
||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
|
||||
| `-v` | `--version` | version for toolbox | |
|
||||
|
||||
## Examples
|
||||
|
||||
@@ -50,8 +50,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
|
||||
## AlloyDB Postgres Admin
|
||||
|
||||
@@ -180,8 +178,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
|
||||
* **Tools:**
|
||||
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
|
||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||
@@ -190,7 +186,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_databases`: Lists all databases for a Cloud SQL instance.
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
||||
|
||||
## Cloud SQL for PostgreSQL
|
||||
|
||||
@@ -229,8 +224,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
|
||||
## Cloud SQL for PostgreSQL Observability
|
||||
|
||||
@@ -264,7 +257,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
* **Tools:**
|
||||
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
|
||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||
@@ -273,7 +265,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_databases`: Lists all databases for a Cloud SQL instance.
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||
|
||||
## Cloud SQL for SQL Server
|
||||
|
||||
@@ -325,7 +316,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
* `create_user`
|
||||
* `clone_instance`
|
||||
* **Tools:**
|
||||
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
|
||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||
@@ -334,7 +324,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_databases`: Lists all databases for a Cloud SQL instance.
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
||||
|
||||
## Dataplex
|
||||
|
||||
@@ -454,8 +443,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
|
||||
* `--prebuilt` value: `mssql`
|
||||
* **Environment Variables:**
|
||||
* `MSSQL_HOST`: (Optional) The hostname or IP address of the SQL Server instance.
|
||||
* `MSSQL_PORT`: (Optional) The port number for the SQL Server instance.
|
||||
* `MSSQL_HOST`: The hostname or IP address of the SQL Server instance.
|
||||
* `MSSQL_PORT`: The port number for the SQL Server instance.
|
||||
* `MSSQL_DATABASE`: The name of the database to connect to.
|
||||
* `MSSQL_USER`: The database username.
|
||||
* `MSSQL_PASSWORD`: The password for the database user.
|
||||
@@ -536,8 +525,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
|
||||
## Google Cloud Serverless for Apache Spark
|
||||
|
||||
@@ -569,7 +556,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
||||
* `execute_sql`: Executes a DML SQL query.
|
||||
* `execute_sql_dql`: Executes a DQL SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_graphs`: Lists graphs in the database.
|
||||
|
||||
## Spanner (PostgreSQL dialect)
|
||||
|
||||
|
||||
@@ -71,18 +71,6 @@ cluster][alloydb-free-trial].
|
||||
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
|
||||
List replication stats in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-query-stats`](../tools/postgres/postgres-list-query-stats.md)
|
||||
List query statistics in a PostgreSQL database.
|
||||
|
||||
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
|
||||
List cardinality of columns in a table in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
|
||||
List publication tables in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
|
||||
List tablespaces in an AlloyDB for PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
|
||||
|
||||
@@ -67,18 +67,6 @@ to a database by following these instructions][csql-pg-quickstart].
|
||||
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
|
||||
List replication stats in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-query-stats`](../tools/postgres/postgres-list-query-stats.md)
|
||||
List query statistics in a PostgreSQL database.
|
||||
|
||||
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
|
||||
List cardinality of columns in a table in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
|
||||
List publication tables in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
|
||||
List tablespaces in a PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for Postgres using
|
||||
|
||||
@@ -62,18 +62,6 @@ reputation for reliability, feature robustness, and performance.
|
||||
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
|
||||
List replication stats in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-query-stats`](../tools/postgres/postgres-list-query-stats.md)
|
||||
List query statistics in a PostgreSQL database.
|
||||
|
||||
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
|
||||
List cardinality of columns in a table in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
|
||||
List publication tables in a PostgreSQL database.
|
||||
|
||||
- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
|
||||
List tablespaces in a PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
|
||||
|
||||
@@ -21,10 +21,6 @@ Apache Spark.
|
||||
Get a Serverless Spark batch.
|
||||
- [`serverless-spark-cancel-batch`](../tools/serverless-spark/serverless-spark-cancel-batch.md)
|
||||
Cancel a running Serverless Spark batch operation.
|
||||
- [`serverless-spark-create-pyspark-batch`](../tools/serverless-spark/serverless-spark-create-pyspark-batch.md)
|
||||
Create a Serverless Spark PySpark batch operation.
|
||||
- [`serverless-spark-create-spark-batch`](../tools/serverless-spark/serverless-spark-create-spark-batch.md)
|
||||
Create a Serverless Spark Java batch operation.
|
||||
|
||||
## Requirements
|
||||
|
||||
|
||||
@@ -34,9 +34,6 @@ the Google Cloud console][spanner-quickstart].
|
||||
- [`spanner-list-tables`](../tools/spanner/spanner-list-tables.md)
|
||||
Retrieve schema information about tables in a Spanner database.
|
||||
|
||||
- [`spanner-list-graphs`](../tools/spanner/spanner-list-graphs.md)
|
||||
Retrieve schema information about graphs in a Spanner database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Spanner using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/spanner_mcp/)
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
---
|
||||
title: cloud-sql-clone-instance
|
||||
type: docs
|
||||
weight: 10
|
||||
description: "Clone a Cloud SQL instance."
|
||||
---
|
||||
|
||||
The `cloud-sql-clone-instance` tool clones a Cloud SQL instance using the Cloud SQL Admin API.
|
||||
|
||||
{{< notice info dd>}}
|
||||
This tool uses a `source` of kind `cloud-sql-admin`.
|
||||
{{< /notice >}}
|
||||
|
||||
## Examples
|
||||
|
||||
Basic clone (current state)
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
clone-instance-basic:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
description: "Creates an exact copy of a Cloud SQL instance. Supports configuring instance zones and high-availability setup through zone preferences."
|
||||
```
|
||||
|
||||
Point-in-time recovery (PITR) clone
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
clone-instance-pitr:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
description: "Creates an exact copy of a Cloud SQL instance at a specific point in time (PITR). Supports configuring instance zones and high-availability setup through zone preferences"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Tool Configuration
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| -------------- | :------: | :----------: | ------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "cloud-sql-clone-instance". |
|
||||
| source | string | true | The name of the `cloud-sql-admin` source to use. |
|
||||
| description | string | false | A description of the tool. |
|
||||
|
||||
### Tool Inputs
|
||||
|
||||
| **parameter** | **type** | **required** | **description** |
|
||||
| -------------------------- | :------: | :----------: | ------------------------------------------------------------------------------- |
|
||||
| project | string | true | The project ID. |
|
||||
| sourceInstanceName | string | true | The name of the source instance to clone. |
|
||||
| destinationInstanceName | string | true | The name of the new (cloned) instance. |
|
||||
| pointInTime | string | false | (Optional) The point in time for a PITR (Point-In-Time Recovery) clone. |
|
||||
| preferredZone | string | false | (Optional) The preferred zone for the cloned instance. If not specified, defaults to the source instance's zone. |
|
||||
| preferredSecondaryZone | string | false | (Optional) The preferred secondary zone for the cloned instance (for HA). |
|
||||
|
||||
## Usage Notes
|
||||
|
||||
- The tool supports both basic clone and point-in-time recovery (PITR) clone operations.
|
||||
- For PITR, specify the `pointInTime` parameter in RFC3339 format (e.g., `2024-01-01T00:00:00Z`).
|
||||
- The source must be a valid Cloud SQL Admin API source.
|
||||
- You can optionally specify the `zone` parameter to set the zone for the cloned instance. If omitted, the zone of the source instance will be used.
|
||||
- You can optionally specify the `preferredZone` and `preferredSecondaryZone` (only in REGIONAL instances) to set the preferred zones for the cloned instance. These are useful for high availability (HA) configurations. If omitted, defaults will be used based on the source instance.
|
||||
|
||||
## See Also
|
||||
- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
|
||||
- [Toolbox Cloud SQL tools documentation](../cloudsql)
|
||||
- [Cloud SQL Clone API documentation](https://cloud.google.com/sql/docs/mysql/clone-instance)
|
||||
@@ -48,11 +48,11 @@ in the `data` parameter, like this:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the documents will be inserted. |
|
||||
| canonical | bool | false | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the documents will be inserted. |
|
||||
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
|
||||
|
||||
@@ -43,11 +43,11 @@ An LLM would call this tool by providing the document as a JSON string in the
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the document will be inserted. |
|
||||
| canonical | bool | false | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the document will be inserted. |
|
||||
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
|
||||
|
||||
@@ -57,16 +57,16 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection in which to update documents. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | false | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. Defaults to `false`. |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection in which to update documents. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | true | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
|
||||
@@ -57,16 +57,16 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection to update a document in. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | false | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). Defaults to `false`. |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection to update a document in. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | true | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: "postgres-get-column-cardinality"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "postgres-get-column-cardinality" tool estimates the number of unique values in one or all columns of a Postgres database table.
|
||||
aliases:
|
||||
- /resources/tools/postgres-get-column-cardinality
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `postgres-get-column-cardinality` tool estimates the number of unique values
|
||||
(cardinality) for one or all columns in a specific PostgreSQL table by using the
|
||||
database's internal statistics. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-get-column-cardinality` returns detailed information as JSON about column
|
||||
cardinality values, ordered by estimated cardinality in descending order. The tool takes
|
||||
the following input parameters:
|
||||
|
||||
- `schema_name` (required): The schema name in which the table is present.
|
||||
- `table_name` (required): The table name in which the column is present.
|
||||
- `column_name` (optional): The column name for which the cardinality is to be found.
|
||||
If not provided, cardinality for all columns will be returned. Default: `""`.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_column_cardinality:
|
||||
kind: postgres-get-column-cardinality
|
||||
source: postgres-source
|
||||
description: Estimates the number of unique values (cardinality) quickly for one or all columns in a specific PostgreSQL table by using the database's internal statistics, returning the results in descending order of estimated cardinality. Please run ANALYZE on the table before using this tool to get accurate results. The tool returns the column_name and the estimated_cardinality. If the column_name is not provided, the tool returns all columns along with their estimated cardinality.
|
||||
```
|
||||
|
||||
The response is a JSON array with the following elements:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"column_name": "name of the column",
|
||||
"estimated_cardinality": "estimated number of unique values in the column"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
For accurate results, it's recommended to run `ANALYZE` on the table before using this
|
||||
tool. The `ANALYZE` command updates the database statistics that this tool relies on
|
||||
to estimate cardinality.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-get-column-cardinality". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -1,66 +0,0 @@
|
||||
---
|
||||
title: "postgres-list-publication-tables"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "postgres-list-publication-tables" tool lists publication tables in a Postgres database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-list-publication-tables
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `postgres-list-publication-tables` tool lists all publication tables in the database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-list-publication-tables` lists detailed information as JSON for publication tables. A publication table in PostgreSQL is a
|
||||
table that is explicitly included as a source for replication within a publication (a set of changes generated from a table or group
|
||||
of tables) as part of the logical replication feature. The tool takes the following input parameters:
|
||||
|
||||
- `table_names` (optional): Filters by a comma-separated list of table names. Default: `""`
|
||||
- `publication_names` (optional): Filters by a comma-separated list of publication names. Default: `""`
|
||||
- `schema_names` (optional): Filters by a comma-separated list of schema names. Default: `""`
|
||||
- `limit` (optional): The maximum number of rows to return. Default: `50`
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_indexes:
|
||||
kind: postgres-list-publication-tables
|
||||
source: postgres-source
|
||||
description: |
|
||||
Lists all tables that are explicitly part of a publication in the database.
|
||||
Tables that are part of a publication via 'FOR ALL TABLES' are not included,
|
||||
unless they are also explicitly added to the publication.
|
||||
Returns the publication name, schema name, and table name, along with
|
||||
definition details indicating if it publishes all tables, whether it
|
||||
replicates inserts, updates, deletes, or truncates, and the publication
|
||||
owner.
|
||||
```
|
||||
|
||||
The response is a JSON array with the following elements:
|
||||
```json
|
||||
{
|
||||
"publication_name": "Name of the publication",
|
||||
"schema_name": "Name of the schema the table belongs to",
|
||||
"table_name": "Name of the table",
|
||||
"publishes_all_tables": "boolean indicating if the publication was created with FOR ALL TABLES",
|
||||
"publishes_inserts": "boolean indicating if INSERT operations are replicated",
|
||||
"publishes_updates": "boolean indicating if UPDATE operations are replicated",
|
||||
"publishes_deletes": "boolean indicating if DELETE operations are replicated",
|
||||
"publishes_truncates": "boolean indicating if TRUNCATE operations are replicated",
|
||||
"publication_owner": "Username of the database role that owns the publication"
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-list-publication-tables". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | false | Description of the tool that is passed to the agent. |
|
||||
@@ -1,71 +0,0 @@
|
||||
---
|
||||
title: "postgres-list-query-stats"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "postgres-list-query-stats" tool lists query statistics from a Postgres database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-list-query-stats
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `postgres-list-query-stats` tool retrieves query statistics from the
|
||||
`pg_stat_statements` extension in a PostgreSQL database. It provides detailed
|
||||
performance metrics for executed queries. It's compatible with any of the following
|
||||
sources:
|
||||
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-list-query-stats` lists detailed query statistics as JSON, ordered by
|
||||
total execution time in descending order. The tool takes the following input parameters:
|
||||
|
||||
- `database_name` (optional): The database name to filter query stats for. The input is
|
||||
used within a LIKE clause. Default: `""` (all databases).
|
||||
- `limit` (optional): The maximum number of results to return. Default: `50`.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_query_stats:
|
||||
kind: postgres-list-query-stats
|
||||
source: postgres-source
|
||||
description: List query statistics from pg_stat_statements, showing performance metrics for queries including execution counts, timing information, and resource usage. Results are ordered by total execution time descending.
|
||||
```
|
||||
|
||||
The response is a JSON array with the following elements:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"datname": "database name",
|
||||
"query": "the SQL query text",
|
||||
"calls": "number of times the query was executed",
|
||||
"total_exec_time": "total execution time in milliseconds",
|
||||
"min_exec_time": "minimum execution time in milliseconds",
|
||||
"max_exec_time": "maximum execution time in milliseconds",
|
||||
"mean_exec_time": "mean execution time in milliseconds",
|
||||
"rows": "total number of rows retrieved or affected",
|
||||
"shared_blks_hit": "number of shared block cache hits",
|
||||
"shared_blks_read": "number of shared block disk reads"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
This tool requires the `pg_stat_statements` extension to be installed and enabled
|
||||
on the PostgreSQL database. The `pg_stat_statements` extension tracks execution
|
||||
statistics for all SQL statements executed by the server, which is useful for
|
||||
identifying slow queries and understanding query performance patterns.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-list-query-stats". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -1,56 +0,0 @@
|
||||
---
|
||||
title: "postgres-list-tablespaces"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "postgres-list-tablespaces" tool lists tablespaces in a Postgres database.
|
||||
aliases:
|
||||
- /resources/tools/postgres-list-tablespaces
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `postgres-list-tablespaces` tool lists available tablespaces in the database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-list-tablespaces` lists detailed information as JSON for tablespaces. The tool takes the following input parameters:
|
||||
|
||||
- `tablespace_name` (optional): A text to filter results by tablespace name. Default: `""`
|
||||
- `limit` (optional): The maximum number of tablespaces to return. Default: `50`
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tablespaces:
|
||||
kind: postgres-list-tablespaces
|
||||
source: postgres-source
|
||||
description: |
|
||||
Lists all tablespaces in the database. Returns the tablespace name,
|
||||
owner name, size in bytes(if the current user has CREATE privileges on
|
||||
the tablespace, otherwise NULL), internal object ID, the access control
|
||||
list regarding permissions, and any specific tablespace options.
|
||||
```
|
||||
The response is a JSON array with the following elements:
|
||||
|
||||
```json
|
||||
{
|
||||
"tablespace_name": "name of the tablespace",
|
||||
"owner_username": "owner of the tablespace",
|
||||
"size_in_bytes": "size in bytes if the current user has CREATE privileges on the tablespace, otherwise NULL",
|
||||
"oid": "Object ID of the tablespace",
|
||||
"spcacl": "Access privileges",
|
||||
"spcoptions": "Tablespace-level options (e.g., seq_page_cost, random_page_cost)"
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:-------------:|------------------------------------------------------|
|
||||
| kind | string | true | Must be "postgres-list-tablespaces". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | false | Description of the tool that is passed to the agent. |
|
||||
@@ -9,5 +9,3 @@ description: >
|
||||
- [serverless-spark-get-batch](./serverless-spark-get-batch.md)
|
||||
- [serverless-spark-list-batches](./serverless-spark-list-batches.md)
|
||||
- [serverless-spark-cancel-batch](./serverless-spark-cancel-batch.md)
|
||||
- [serverless-spark-create-pyspark-batch](./serverless-spark-create-pyspark-batch.md)
|
||||
- [serverless-spark-create-spark-batch](./serverless-spark-create-spark-batch.md)
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
---
|
||||
title: "serverless-spark-create-pyspark-batch"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
A "serverless-spark-create-pyspark-batch" tool submits a Spark batch to run asynchronously.
|
||||
aliases:
|
||||
- /resources/tools/serverless-spark-create-pyspark-batch
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `serverless-spark-create-pyspark-batch` tool submits a Spark batch to a Google
|
||||
Cloud Serverless for Apache Spark source. The workload executes asynchronously
|
||||
and takes around a minute to begin executing; status can be polled using the
|
||||
[get batch](serverless-spark-get-batch.md) tool.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [serverless-spark](../../sources/serverless-spark.md)
|
||||
|
||||
`serverless-spark-create-pyspark-batch` accepts the following parameters:
|
||||
|
||||
- **`mainFile`**: The path to the main Python file, as a gs://... URI.
|
||||
- **`args`** Optional. A list of arguments passed to the main file.
|
||||
- **`version`** Optional. The Serverless [runtime
|
||||
version](https://docs.cloud.google.com/dataproc-serverless/docs/concepts/versions/dataproc-serverless-versions)
|
||||
to execute with.
|
||||
|
||||
## Custom Configuration
|
||||
|
||||
This tool supports custom
|
||||
[`runtimeConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig)
|
||||
and
|
||||
[`environmentConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig)
|
||||
settings, which can be specified in a `tools.yaml` file. These configurations
|
||||
are parsed as YAML and passed to the Dataproc API.
|
||||
|
||||
**Note:** If your project requires custom runtime or environment configuration,
|
||||
you must write a custom `tools.yaml`; you cannot use the `serverless-spark`
|
||||
prebuilt config.
|
||||
|
||||
### Example `tools.yaml`
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
- name: "serverless-spark-create-pyspark-batch"
|
||||
kind: "serverless-spark-create-pyspark-batch"
|
||||
source: "my-serverless-spark-source"
|
||||
runtimeConfig:
|
||||
properties:
|
||||
spark.driver.memory: "1024m"
|
||||
environmentConfig:
|
||||
executionConfig:
|
||||
networkUri: "my-network"
|
||||
```
|
||||
|
||||
## Response Format
|
||||
|
||||
The response is an [operation](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.operations#resource:-operation) metadata JSON
|
||||
object corresponding to [batch operation metadata](https://pkg.go.dev/cloud.google.com/go/dataproc/v2/apiv1/dataprocpb#BatchOperationMetadata).
|
||||
Example:
|
||||
|
||||
```json
|
||||
{
|
||||
"batch": "projects/myproject/locations/us-central1/batches/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"batchUuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"createTime": "2025-11-19T16:36:47.607119Z",
|
||||
"description": "Batch",
|
||||
"labels": {
|
||||
"goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"goog-dataproc-location": "us-central1"
|
||||
},
|
||||
"operationType": "BATCH",
|
||||
"warnings": [
|
||||
"No runtime version specified. Using the default runtime version."
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ----------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "serverless-spark-create-pyspark-batch". |
|
||||
| source | string | true | Name of the source the tool should use. |
|
||||
| description | string | false | Description of the tool that is passed to the LLM. |
|
||||
| runtimeConfig | map | false | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. |
|
||||
| environmentConfig | map | false | [Environment config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig) for all batches created with this tool. |
|
||||
| authRequired | string[] | false | List of auth services required to invoke this tool. |
|
||||
@@ -1,97 +0,0 @@
|
||||
---
|
||||
title: "serverless-spark-create-spark-batch"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
A "serverless-spark-create-spark-batch" tool submits a Spark batch to run asynchronously.
|
||||
aliases:
|
||||
- /resources/tools/serverless-spark-create-spark-batch
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `serverless-spark-create-spark-batch` tool submits a Java Spark batch to a
|
||||
Google Cloud Serverless for Apache Spark source. The workload executes
|
||||
asynchronously and takes around a minute to begin executing; status can be
|
||||
polled using the [get batch](serverless-spark-get-batch.md) tool.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [serverless-spark](../../sources/serverless-spark.md)
|
||||
|
||||
`serverless-spark-create-spark-batch` accepts the following parameters:
|
||||
|
||||
- **`mainJarFile`**: Optional. The gs:// URI of the jar file that contains the
|
||||
main class. Exactly one of mainJarFile or mainClass must be specified.
|
||||
- **`mainClass`**: Optional. The name of the driver's main class. Exactly one of
|
||||
mainJarFile or mainClass must be specified.
|
||||
- **`jarFiles`**: Optional. A list of gs:// URIs of jar files to add to the CLASSPATHs of
|
||||
the Spark driver and tasks.
|
||||
- **`args`** Optional. A list of arguments passed to the driver.
|
||||
- **`version`** Optional. The Serverless [runtime
|
||||
version](https://docs.cloud.google.com/dataproc-serverless/docs/concepts/versions/dataproc-serverless-versions)
|
||||
to execute with.
|
||||
|
||||
## Custom Configuration
|
||||
|
||||
This tool supports custom
|
||||
[`runtimeConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig)
|
||||
and
|
||||
[`environmentConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig)
|
||||
settings, which can be specified in a `tools.yaml` file. These configurations
|
||||
are parsed as YAML and passed to the Dataproc API.
|
||||
|
||||
**Note:** If your project requires custom runtime or environment configuration,
|
||||
you must write a custom `tools.yaml`; you cannot use the `serverless-spark`
|
||||
prebuilt config.
|
||||
|
||||
### Example `tools.yaml`
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
- name: "serverless-spark-create-spark-batch"
|
||||
kind: "serverless-spark-create-spark-batch"
|
||||
source: "my-serverless-spark-source"
|
||||
runtimeConfig:
|
||||
properties:
|
||||
spark.driver.memory: "1024m"
|
||||
environmentConfig:
|
||||
executionConfig:
|
||||
networkUri: "my-network"
|
||||
```
|
||||
|
||||
## Response Format
|
||||
|
||||
The response is an
|
||||
[operation](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.operations#resource:-operation)
|
||||
metadata JSON object corresponding to [batch operation
|
||||
metadata](https://pkg.go.dev/cloud.google.com/go/dataproc/v2/apiv1/dataprocpb#BatchOperationMetadata).
|
||||
Example:
|
||||
|
||||
```json
|
||||
{
|
||||
"batch": "projects/myproject/locations/us-central1/batches/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"batchUuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"createTime": "2025-11-19T16:36:47.607119Z",
|
||||
"description": "Batch",
|
||||
"labels": {
|
||||
"goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"goog-dataproc-location": "us-central1"
|
||||
},
|
||||
"operationType": "BATCH",
|
||||
"warnings": [
|
||||
"No runtime version specified. Using the default runtime version."
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ----------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "serverless-spark-create-spark-batch". |
|
||||
| source | string | true | Name of the source the tool should use. |
|
||||
| description | string | false | Description of the tool that is passed to the LLM. |
|
||||
| runtimeConfig | map | false | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. |
|
||||
| environmentConfig | map | false | [Environment config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig) for all batches created with this tool. |
|
||||
| authRequired | string[] | false | List of auth services required to invoke this tool. |
|
||||
@@ -1,276 +0,0 @@
|
||||
---
|
||||
title: "spanner-list-graphs"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
A "spanner-list-graphs" tool retrieves schema information about graphs in a
|
||||
Google Cloud Spanner database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `spanner-list-graphs` tool retrieves comprehensive schema information about
|
||||
graphs in a Cloud Spanner database. It returns detailed metadata including
|
||||
node tables, edge tables, labels and property declarations. It's compatible with:
|
||||
|
||||
- [spanner](../../sources/spanner.md)
|
||||
|
||||
This tool is read-only and executes pre-defined SQL queries against the
|
||||
`INFORMATION_SCHEMA` tables to gather metadata.
|
||||
{{< notice warning >}}
|
||||
The tool only works for the GoogleSQL
|
||||
source dialect, as Spanner Graph isn't available in the PostgreSQL dialect.
|
||||
{{< /notice >}}
|
||||
|
||||
## Features
|
||||
|
||||
- **Comprehensive Schema Information**: Returns node tables, edge tables, labels
|
||||
and property declarations
|
||||
- **Flexible Filtering**: Can list all graphs or filter by specific graph names
|
||||
- **Output Format Options**: Choose between simple (graph names only) or detailed
|
||||
(full schema information) output
|
||||
|
||||
## Example
|
||||
|
||||
### Basic Usage - List All Graphs
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-spanner-db:
|
||||
kind: spanner
|
||||
project: ${SPANNER_PROJECT}
|
||||
instance: ${SPANNER_INSTANCE}
|
||||
database: ${SPANNER_DATABASE}
|
||||
    dialect: googlesql # won't work for postgresql
|
||||
|
||||
tools:
|
||||
list_all_graphs:
|
||||
kind: spanner-list-graphs
|
||||
source: my-spanner-db
|
||||
description: Lists all graphs with their complete schema information
|
||||
```
|
||||
|
||||
### List Specific Graphs
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_specific_graphs:
|
||||
kind: spanner-list-graphs
|
||||
source: my-spanner-db
|
||||
description: |
|
||||
Lists schema information for specific graphs.
|
||||
Example usage:
|
||||
{
|
||||
"graph_names": "FinGraph,SocialGraph",
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts two optional parameters:
|
||||
|
||||
| **parameter** | **type** | **default** | **description** |
|
||||
|---------------|:--------:|:-----------:|------------------------------------------------------------------------------------------------------|
|
||||
| graph_names | string | "" | Comma-separated list of graph names to filter. If empty, lists all graphs in user-accessible schemas |
|
||||
| output_format | string | "detailed" | Output format: "simple" returns only graph names, "detailed" returns full schema information |
|
||||
|
||||
## Output Format
|
||||
|
||||
### Simple Format
|
||||
|
||||
When `output_format` is set to "simple", the tool returns a minimal JSON structure:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"object_details": {
|
||||
"name": "FinGraph"
|
||||
},
|
||||
"object_name": "FinGraph",
|
||||
"schema_name": ""
|
||||
},
|
||||
{
|
||||
"object_details": {
|
||||
"name": "SocialGraph"
|
||||
},
|
||||
"object_name": "SocialGraph",
|
||||
"schema_name": ""
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Detailed Format
|
||||
|
||||
When `output_format` is set to "detailed" (default), the tool returns
|
||||
comprehensive schema information:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"object_details": {
|
||||
"catalog": "",
|
||||
"edge_tables": [
|
||||
{
|
||||
"baseCatalogName": "",
|
||||
"baseSchemaName": "",
|
||||
"baseTableName": "Knows",
|
||||
"destinationNodeTable": {
|
||||
"edgeTableColumns": [
|
||||
"DstId"
|
||||
],
|
||||
"nodeTableColumns": [
|
||||
"Id"
|
||||
],
|
||||
"nodeTableName": "Person"
|
||||
},
|
||||
"keyColumns": [
|
||||
"SrcId",
|
||||
"DstId"
|
||||
],
|
||||
"kind": "EDGE",
|
||||
"labelNames": [
|
||||
"Knows"
|
||||
],
|
||||
"name": "Knows",
|
||||
"propertyDefinitions": [
|
||||
{
|
||||
"propertyDeclarationName": "DstId",
|
||||
"valueExpressionSql": "DstId"
|
||||
},
|
||||
{
|
||||
"propertyDeclarationName": "SrcId",
|
||||
"valueExpressionSql": "SrcId"
|
||||
}
|
||||
],
|
||||
"sourceNodeTable": {
|
||||
"edgeTableColumns": [
|
||||
"SrcId"
|
||||
],
|
||||
"nodeTableColumns": [
|
||||
"Id"
|
||||
],
|
||||
"nodeTableName": "Person"
|
||||
}
|
||||
}
|
||||
],
|
||||
"labels": [
|
||||
{
|
||||
"name": "Knows",
|
||||
"propertyDeclarationNames": [
|
||||
"DstId",
|
||||
"SrcId"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Person",
|
||||
"propertyDeclarationNames": [
|
||||
"Id",
|
||||
"Name"
|
||||
]
|
||||
}
|
||||
],
|
||||
"node_tables": [
|
||||
{
|
||||
"baseCatalogName": "",
|
||||
"baseSchemaName": "",
|
||||
"baseTableName": "Person",
|
||||
"keyColumns": [
|
||||
"Id"
|
||||
],
|
||||
"kind": "NODE",
|
||||
"labelNames": [
|
||||
"Person"
|
||||
],
|
||||
"name": "Person",
|
||||
"propertyDefinitions": [
|
||||
{
|
||||
"propertyDeclarationName": "Id",
|
||||
"valueExpressionSql": "Id"
|
||||
},
|
||||
{
|
||||
"propertyDeclarationName": "Name",
|
||||
"valueExpressionSql": "Name"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"object_name": "SocialGraph",
|
||||
"property_declarations": [
|
||||
{
|
||||
"name": "DstId",
|
||||
"type": "INT64"
|
||||
},
|
||||
{
|
||||
"name": "Id",
|
||||
"type": "INT64"
|
||||
},
|
||||
{
|
||||
"name": "Name",
|
||||
"type": "STRING"
|
||||
},
|
||||
{
|
||||
"name": "SrcId",
|
||||
"type": "INT64"
|
||||
}
|
||||
],
|
||||
"schema_name": ""
|
||||
},
|
||||
"object_name": "SocialGraph",
|
||||
"schema_name": ""
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Use Cases
|
||||
|
||||
1. **Database Documentation**: Generate comprehensive documentation of your
|
||||
database schema
|
||||
2. **Schema Validation**: Verify that expected graphs, node tables and edge tables exist
|
||||
3. **Migration Planning**: Understand the current schema before making changes
|
||||
4. **Development Tools**: Build tools that need to understand database structure
|
||||
5. **Audit and Compliance**: Track schema changes and ensure compliance with
|
||||
data governance policies
|
||||
|
||||
## Example with Agent Integration
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
spanner-db:
|
||||
kind: spanner
|
||||
project: my-project
|
||||
instance: my-instance
|
||||
database: my-database
|
||||
dialect: googlesql
|
||||
|
||||
tools:
|
||||
schema_inspector:
|
||||
kind: spanner-list-graphs
|
||||
source: spanner-db
|
||||
description: |
|
||||
Use this tool to inspect database schema information.
|
||||
You can:
|
||||
- List all graphs by leaving graph_names empty
|
||||
- Get specific graph schemas by providing comma-separated graph names
|
||||
- Choose between simple (names only) or detailed (full schema) output
|
||||
|
||||
Examples:
|
||||
1. List all graphs with details: {"output_format": "detailed"}
|
||||
2. Get specific graphs: {"graph_names": "FinGraph,SocialGraph", "output_format": "detailed"}
|
||||
3. Just get graph names: {"output_format": "simple"}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------|:--------:|:------------:|-----------------------------------------------------------------|
|
||||
| kind | string | true | Must be "spanner-list-graphs" |
|
||||
| source | string | true | Name of the Spanner source to query (dialect must be GoogleSQL) |
|
||||
| description | string | false | Description of the tool that is passed to the LLM |
|
||||
| authRequired | string[] | false | List of auth services required to invoke this tool |
|
||||
|
||||
## Notes
|
||||
|
||||
- This tool is read-only and does not modify any data
|
||||
- The tool only works for the GoogleSQL source dialect
|
||||
- Large databases with many graphs may take longer to query
|
||||
@@ -771,7 +771,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.22.0\" # x-release-please-version\n",
|
||||
"version = \"0.21.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.22.0"
|
||||
export VERSION="0.21.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.22.0\" # x-release-please-version\n",
|
||||
"version = \"0.21.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "mcp-toolbox-for-databases",
|
||||
"version": "0.22.0",
|
||||
"version": "0.21.0",
|
||||
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
||||
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
||||
}
|
||||
21
go.mod
21
go.mod
@@ -17,7 +17,7 @@ require (
|
||||
cloud.google.com/go/spanner v1.86.1
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.40.3
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.30.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0
|
||||
github.com/cenkalti/backoff/v5 v5.0.3
|
||||
github.com/couchbase/gocb/v2 v2.11.1
|
||||
@@ -26,7 +26,6 @@ require (
|
||||
github.com/elastic/go-elasticsearch/v9 v9.2.0
|
||||
github.com/fsnotify/fsnotify v1.9.0
|
||||
github.com/go-chi/chi/v5 v5.2.3
|
||||
github.com/go-chi/cors v1.2.2
|
||||
github.com/go-chi/httplog/v2 v2.1.1
|
||||
github.com/go-chi/render v1.0.3
|
||||
github.com/go-goquery/goquery v1.0.1
|
||||
@@ -89,7 +88,7 @@ require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.10.3 // indirect
|
||||
github.com/ajg/form v1.5.1 // indirect
|
||||
github.com/apache/arrow/go/v15 v15.0.2 // indirect
|
||||
@@ -160,7 +159,7 @@ require (
|
||||
go.opencensus.io v0.24.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.37.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.37.0 // indirect
|
||||
@@ -171,15 +170,15 @@ require (
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.45.0 // indirect
|
||||
golang.org/x/mod v0.29.0 // indirect
|
||||
golang.org/x/net v0.47.0 // indirect
|
||||
golang.org/x/crypto v0.43.0 // indirect
|
||||
golang.org/x/mod v0.28.0 // indirect
|
||||
golang.org/x/net v0.46.0 // indirect
|
||||
golang.org/x/sync v0.18.0 // indirect
|
||||
golang.org/x/sys v0.38.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect
|
||||
golang.org/x/text v0.31.0 // indirect
|
||||
golang.org/x/sys v0.37.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20250908211612-aef8a434d053 // indirect
|
||||
golang.org/x/text v0.30.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/tools v0.38.0 // indirect
|
||||
golang.org/x/tools v0.37.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect
|
||||
|
||||
46
go.sum
46
go.sum
@@ -669,12 +669,12 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.30.0 h1:5eCqTd9rTwMlE62z0xFdzPJ+3pji75hJrwq1jrCjo5w=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.30.0/go.mod h1:4BcvJy7WxY8X2eX49z2VO1ByhO+CcQK8lKPCH/QlZvo=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0 h1:xfK3bbi6F2RDtaZFtUdKO3osOBIhNb+xTs8lFW6yx9o=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0/go.mod h1:vB2GH9GAYYJTO3mEn8oYwzEdhlayZIdQz6zdzgUIRvA=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 h1:s0WlVbf9qpvkh1c/uDAPElam0WrL7fHRIidgZJ7UqZI=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0/go.mod h1:Mf6O40IAyB9zR/1J8nGDDPirZQQPbYJni8Yisy7NTMc=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0 h1:YVtMlmfRUTaWs3+1acwMBp7rBUo6zrxl6Kn13/R9YW4=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0/go.mod h1:rKOFVIPbNs2wZeh7ZeQ0D9p/XLgbNiTr5m7x6KuAshk=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0 h1:4LP6hvB4I5ouTbGgWtixJhgED6xdf67twf9PoY96Tbg=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0/go.mod h1:jUZ5LYlw40WMd07qxcQJD5M40aUxrfwqQX1g7zxYnrQ=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
|
||||
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
@@ -858,8 +858,6 @@ github.com/gabriel-vasile/mimetype v1.4.10/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9t
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
|
||||
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
|
||||
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
|
||||
github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
|
||||
github.com/go-chi/httplog/v2 v2.1.1 h1:ojojiu4PIaoeJ/qAO4GWUxJqvYUTobeo7zmuHQJAxRk=
|
||||
github.com/go-chi/httplog/v2 v2.1.1/go.mod h1:/XXdxicJsp4BA5fapgIC3VuTD+z0Z/VzukoB3VDc1YE=
|
||||
github.com/go-chi/render v1.0.3 h1:AsXqd2a1/INaIfUSKq3G5uA8weYx20FOsM7uSoCyyt4=
|
||||
@@ -1322,8 +1320,8 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw=
|
||||
go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0 h1:YH4g8lQroajqUwWbq/tr2QX1JFmEXaDLgG+ew9bLMWo=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0/go.mod h1:fvPi2qXDqFs8M4B4fmJhE92TyQs9Ydjlg3RvfUp+NbQ=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 h1:rbRJ8BBoVMsQShESYZ0FkvcITu8X8QNwJogcLUmDNNw=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg=
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0 h1:1+EHlhAe/tukctfePZRrDruB9vn7MdwyC+rf36nUSPM=
|
||||
@@ -1384,8 +1382,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
@@ -1448,8 +1446,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
||||
golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U=
|
||||
golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -1513,8 +1511,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -1653,11 +1651,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU=
|
||||
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE=
|
||||
golang.org/x/telemetry v0.0.0-20250908211612-aef8a434d053 h1:dHQOQddU4YHS5gY33/6klKjq7Gp3WwMyOXGNp5nzRj8=
|
||||
golang.org/x/telemetry v0.0.0-20250908211612-aef8a434d053/go.mod h1:+nZKN+XVh4LCiA9DV3ywrzN4gumyCnKjau3NGb9SGoE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
@@ -1691,8 +1689,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@@ -1767,8 +1765,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
||||
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
||||
golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE=
|
||||
golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
|
||||
@@ -175,7 +175,7 @@ tools:
|
||||
list_schemas:
|
||||
kind: postgres-list-schemas
|
||||
source: alloydb-pg-source
|
||||
|
||||
|
||||
list_indexes:
|
||||
kind: postgres-list-indexes
|
||||
source: alloydb-pg-source
|
||||
@@ -192,22 +192,6 @@ tools:
|
||||
kind: postgres-list-triggers
|
||||
source: alloydb-pg-source
|
||||
|
||||
list_query_stats:
|
||||
kind: postgres-list-query-stats
|
||||
source: alloydb-pg-source
|
||||
|
||||
get_column_cardinality:
|
||||
kind: postgres-get-column-cardinality
|
||||
source: alloydb-pg-source
|
||||
|
||||
list_publication_tables:
|
||||
kind: postgres-list-publication-tables
|
||||
source: alloydb-pg-source
|
||||
|
||||
list_tablespaces:
|
||||
kind: postgres-list-tablespaces
|
||||
source: alloydb-pg-source
|
||||
|
||||
toolsets:
|
||||
alloydb_postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -230,7 +214,4 @@ toolsets:
|
||||
- long_running_transactions
|
||||
- list_locks
|
||||
- replication_stats
|
||||
- list_query_stats
|
||||
- get_column_cardinality
|
||||
- list_publication_tables
|
||||
- list_tablespaces
|
||||
|
||||
|
||||
@@ -39,10 +39,6 @@ tools:
|
||||
wait_for_operation:
|
||||
kind: cloud-sql-wait-for-operation
|
||||
source: cloud-sql-admin-source
|
||||
multiplier: 4
|
||||
clone_instance:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mssql_admin_tools:
|
||||
@@ -53,4 +49,3 @@ toolsets:
|
||||
- list_databases
|
||||
- create_user
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
|
||||
@@ -39,10 +39,6 @@ tools:
|
||||
wait_for_operation:
|
||||
kind: cloud-sql-wait-for-operation
|
||||
source: cloud-sql-admin-source
|
||||
multiplier: 4
|
||||
clone_instance:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mysql_admin_tools:
|
||||
@@ -53,4 +49,3 @@ toolsets:
|
||||
- list_databases
|
||||
- create_user
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
|
||||
@@ -39,10 +39,6 @@ tools:
|
||||
wait_for_operation:
|
||||
kind: cloud-sql-wait-for-operation
|
||||
source: cloud-sql-admin-source
|
||||
multiplier: 4
|
||||
clone_instance:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
postgres_upgrade_precheck:
|
||||
kind: postgres-upgrade-precheck
|
||||
source: cloud-sql-admin-source
|
||||
@@ -57,4 +53,3 @@ toolsets:
|
||||
- create_user
|
||||
- wait_for_operation
|
||||
- postgres_upgrade_precheck
|
||||
- clone_instance
|
||||
|
||||
@@ -177,7 +177,7 @@ tools:
|
||||
list_schemas:
|
||||
kind: postgres-list-schemas
|
||||
source: cloudsql-pg-source
|
||||
|
||||
|
||||
database_overview:
|
||||
kind: postgres-database-overview
|
||||
source: cloudsql-pg-source
|
||||
@@ -194,22 +194,6 @@ tools:
|
||||
kind: postgres-list-sequences
|
||||
source: cloudsql-pg-source
|
||||
|
||||
list_query_stats:
|
||||
kind: postgres-list-query-stats
|
||||
source: cloudsql-pg-source
|
||||
|
||||
get_column_cardinality:
|
||||
kind: postgres-get-column-cardinality
|
||||
source: cloudsql-pg-source
|
||||
|
||||
list_publication_tables:
|
||||
kind: postgres-list-publication-tables
|
||||
source: cloudsql-pg-source
|
||||
|
||||
list_tablespaces:
|
||||
kind: postgres-list-tablespaces
|
||||
source: cloudsql-pg-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -232,7 +216,3 @@ toolsets:
|
||||
- long_running_transactions
|
||||
- list_locks
|
||||
- replication_stats
|
||||
- list_query_stats
|
||||
- get_column_cardinality
|
||||
- list_publication_tables
|
||||
- list_tablespaces
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
sources:
|
||||
mssql-source:
|
||||
kind: mssql
|
||||
host: ${MSSQL_HOST:localhost}
|
||||
port: ${MSSQL_PORT:1433}
|
||||
host: ${MSSQL_HOST}
|
||||
port: ${MSSQL_PORT}
|
||||
database: ${MSSQL_DATABASE}
|
||||
user: ${MSSQL_USER}
|
||||
password: ${MSSQL_PASSWORD}
|
||||
|
||||
@@ -176,7 +176,7 @@ tools:
|
||||
list_schemas:
|
||||
kind: postgres-list-schemas
|
||||
source: postgresql-source
|
||||
|
||||
|
||||
database_overview:
|
||||
kind: postgres-database-overview
|
||||
source: postgresql-source
|
||||
@@ -193,22 +193,6 @@ tools:
|
||||
kind: postgres-list-sequences
|
||||
source: postgresql-source
|
||||
|
||||
list_query_stats:
|
||||
kind: postgres-list-query-stats
|
||||
source: postgresql-source
|
||||
|
||||
get_column_cardinality:
|
||||
kind: postgres-get-column-cardinality
|
||||
source: postgresql-source
|
||||
|
||||
list_publication_tables:
|
||||
kind: postgres-list-publication-tables
|
||||
source: postgresql-source
|
||||
|
||||
list_tablespaces:
|
||||
kind: postgres-list-tablespaces
|
||||
source: postgresql-source
|
||||
|
||||
toolsets:
|
||||
postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -231,7 +215,3 @@ toolsets:
|
||||
- long_running_transactions
|
||||
- list_locks
|
||||
- replication_stats
|
||||
- list_query_stats
|
||||
- get_column_cardinality
|
||||
- list_publication_tables
|
||||
- list_tablespaces
|
||||
|
||||
@@ -28,17 +28,9 @@ tools:
|
||||
cancel_batch:
|
||||
kind: serverless-spark-cancel-batch
|
||||
source: serverless-spark-source
|
||||
create_pyspark_batch:
|
||||
kind: serverless-spark-create-pyspark-batch
|
||||
source: serverless-spark-source
|
||||
create_spark_batch:
|
||||
kind: serverless-spark-create-spark-batch
|
||||
source: serverless-spark-source
|
||||
|
||||
toolsets:
|
||||
serverless_spark_tools:
|
||||
- list_batches
|
||||
- get_batch
|
||||
- cancel_batch
|
||||
- create_pyspark_batch
|
||||
- create_spark_batch
|
||||
|
||||
@@ -35,16 +35,10 @@ tools:
|
||||
list_tables:
|
||||
kind: spanner-list-tables
|
||||
source: spanner-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas. The output can be 'simple' (table names only) or 'detailed' (full schema)."
|
||||
|
||||
list_graphs:
|
||||
kind: spanner-list-graphs
|
||||
source: spanner-source
|
||||
description: "Lists detailed graph schema information (node tables, edge tables, labels and property declarations) as JSON for user-created graphs. Filters by a comma-separated list of graph names. If names are omitted, lists all graphs. The output can be 'simple' (graph names only) or 'detailed' (full schema)."
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
|
||||
toolsets:
|
||||
spanner-database-tools:
|
||||
- execute_sql
|
||||
- execute_sql_dql
|
||||
- list_tables
|
||||
- list_graphs
|
||||
|
||||
@@ -46,7 +46,7 @@ func (t PromptsetConfig) Initialize(serverVersion string, promptsMap map[string]
|
||||
var promptset Promptset
|
||||
promptset.Name = t.Name
|
||||
if !tools.IsValidName(promptset.Name) {
|
||||
return promptset, fmt.Errorf("invalid promptset name: %s", promptset.Name)
|
||||
return promptset, fmt.Errorf("invalid promptset name: %s", t)
|
||||
}
|
||||
promptset.Prompts = make([]*Prompt, 0, len(t.PromptNames))
|
||||
promptset.McpManifest = make([]McpManifest, 0, len(t.PromptNames))
|
||||
@@ -57,7 +57,7 @@ func (t PromptsetConfig) Initialize(serverVersion string, promptsMap map[string]
|
||||
for _, promptName := range t.PromptNames {
|
||||
prompt, ok := promptsMap[promptName]
|
||||
if !ok {
|
||||
return promptset, fmt.Errorf("prompt does not exist: %s", promptName)
|
||||
return promptset, fmt.Errorf("prompt does not exist: %s", t)
|
||||
}
|
||||
promptset.Prompts = append(promptset.Prompts, &prompt)
|
||||
promptset.Manifest.PromptsManifest[promptName] = prompt.Manifest()
|
||||
|
||||
@@ -172,7 +172,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
accessToken := tools.AccessToken(r.Header.Get("Authorization"))
|
||||
|
||||
// Check if this specific tool requires the standard authorization header
|
||||
if tool.RequiresClientAuthorization(s.ResourceMgr) {
|
||||
if tool.RequiresClientAuthorization() {
|
||||
if accessToken == "" {
|
||||
err = fmt.Errorf("tool requires client authorization but access token is missing from the request header")
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
@@ -239,7 +239,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
res, err := tool.Invoke(ctx, s.ResourceMgr, params, accessToken)
|
||||
res, err := tool.Invoke(ctx, params, accessToken)
|
||||
|
||||
// Determine what error to return to the users.
|
||||
if err != nil {
|
||||
@@ -255,7 +255,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
if statusCode == http.StatusUnauthorized || statusCode == http.StatusForbidden {
|
||||
if tool.RequiresClientAuthorization(s.ResourceMgr) {
|
||||
if tool.RequiresClientAuthorization() {
|
||||
// Propagate the original 401/403 error.
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("error invoking tool. Client credentials lack authorization to the source: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, statusCode))
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user