mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-15 02:18:10 -05:00)

Compare commits: docs-py-sd...config-pre (30 commits)
Commit SHAs:

- 8c786786d5
- 94e19b62b6
- 8e0fb03483
- 2817dd1e5d
- 68a218407e
- d69792d843
- 647b04d3a7
- 030df9766f
- 5dbf207162
- 9c3720e31d
- 3cd3c39d66
- 0691a6f715
- 467b96a23b
- 4abf0c39e7
- dd7b9de623
- 41b518b955
- 6e8a9eb8ec
- ef8f3b02f2
- 351b007fe3
- 9d1feca108
- 17b41f6453
- a7799757c9
- d961e373e1
- bcb40a720d
- 4a4cf1e712
- b706b5bc68
- 4d3332d37d
- 1203b7370a
- 4a26ce3c1b
- 306b5becda
@@ -825,7 +825,27 @@ steps:
          elasticsearch \
          elasticsearch

  - id: "snowflake"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
      - "SNOWFLAKE_DATABASE=$_SNOWFLAKE_DATABASE"
      - "SNOWFLAKE_SCHEMA=$_SNOWFLAKE_SCHEMA"
    secretEnv: ["CLIENT_ID", "SNOWFLAKE_USER", "SNOWFLAKE_PASS", "SNOWFLAKE_ACCOUNT"]
    volumes:
      - name: "go"
        path: "/gopath"
    args:
      - -c
      - |
        .ci/test_with_coverage.sh \
          "Snowflake" \
          snowflake \
          snowflake

  - id: "cassandra"
    name: golang:1
    waitFor: ["compile-test-binary"]

@@ -1038,6 +1058,12 @@ availableSecrets:
    env: ELASTICSEARCH_USER
  - versionName: projects/$PROJECT_ID/secrets/elastic_search_pass/versions/latest
    env: ELASTICSEARCH_PASS
  - versionName: projects/$PROJECT_ID/secrets/snowflake_account/versions/latest
    env: SNOWFLAKE_ACCOUNT
  - versionName: projects/$PROJECT_ID/secrets/snowflake_user/versions/latest
    env: SNOWFLAKE_USER
  - versionName: projects/$PROJECT_ID/secrets/snowflake_pass/versions/latest
    env: SNOWFLAKE_PASS
  - versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
    env: CASSANDRA_USER
  - versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest

@@ -1124,4 +1150,5 @@ substitutions:
  _SINGLESTORE_USER: "root"
  _MARIADB_PORT: "3307"
  _MARIADB_DATABASE: test_database
  _SNOWFLAKE_DATABASE: "test"
  _SNOWFLAKE_SCHEMA: "PUBLIC"
2  .gitignore (vendored)

@@ -20,4 +20,4 @@ node_modules

# executable
genai-toolbox
toolbox
toolbox
@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick

# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown

[[params.versions]]
version = "v0.25.0"
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"

[[params.versions]]
version = "v0.24.0"
url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
25  CHANGELOG.md

@@ -1,5 +1,30 @@
# Changelog

## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)


### Features

* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
* **tools/postgressql:** Add tool to list store procedure ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))


### Bug Fixes

* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))

## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
@@ -59,6 +59,13 @@ You can manually trigger the bot by commenting on your Pull Request:
* `/gemini summary`: Posts a summary of the changes in the pull request.
* `/gemini help`: Overview of the available commands

## Guidelines for Pull Requests

1. Please keep your PR small for more thorough review and easier updates. In case of regression, it also allows us to roll back a single feature instead of multiple ones.
1. For non-trivial changes, consider opening an issue and discussing it with the code owners first.
1. Provide a good PR description as a record of what change is being made and why it was made. Link to a GitHub issue if it exists.
1. Make sure your code is thoroughly tested with unit tests and integration tests. Remember to clean up the test instances properly in your code to avoid memory leaks.

## Adding a New Database Source or Tool

Please create an
@@ -85,11 +92,11 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
  `newdb.go`. Create a `Config` struct to include all the necessary parameters
  for connecting to the database (e.g., host, port, username, password, database
  name) and a `Source` struct to store necessary parameters for tools (e.g.,
-  Name, Kind, connection object, additional config).
+  Name, Type, connection object, additional config).
* **Implement the
  [`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
  interface**. This interface requires two methods:
-  * `SourceConfigKind() string`: Returns a unique string identifier for your
+  * `SourceConfigType() string`: Returns a unique string identifier for your
    data source (e.g., `"newdb"`).
  * `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
    Creates a new instance of your data source and establishes a connection to

@@ -97,7 +104,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement the
  [`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
  interface**. This interface requires one method:
-  * `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
+  * `SourceType() string`: Returns the same string identifier as `SourceConfigType()`.
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
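For illustration, a minimal sketch of a hypothetical `newdb` source using the renamed methods is shown below. The package name, struct fields, and connection handle are placeholders, and the sketch omits the real `sources.Source` interface, the `trace.Tracer` argument, and the registry call, so treat it as a shape reference rather than the actual implementation:

```go
// Package newdb sketches the shape of a hypothetical source named "newdb".
package newdb

import "context"

const sourceType = "newdb"

// Config collects the connection parameters read from the tools file.
type Config struct {
	Name, Host, Port, User, Password, Database string
}

// SourceConfigType returns the unique identifier for this source type.
func (c Config) SourceConfigType() string { return sourceType }

// Initialize opens the connection and returns the Source used by tools.
// (The real method also receives a trace.Tracer and returns the
// sources.Source interface; both are omitted in this sketch.)
func (c Config) Initialize(ctx context.Context) (*Source, error) {
	// Placeholder: open the real driver connection here using c.Host, c.User, etc.
	return &Source{Name: c.Name}, nil
}

// Source stores what tools need at runtime: name, type, and a connection handle.
type Source struct {
	Name string
	Conn any // placeholder for the driver's connection or pool type
}

// SourceType returns the same identifier as SourceConfigType.
func (s *Source) SourceType() string { return sourceType }

// The real package also defines an init() that registers Config with the
// server's source registry so "newdb" can be referenced from a tools file.
```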
@@ -110,6 +117,8 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

Remember to keep your PRs small. For example, if you are contributing a new Source, only include one or two core Tools within the same PR, the rest of the Tools can come in subsequent PRs.

* **Create a new directory** under `internal/tools` for your tool type (e.g., `internal/tools/newdb/newdbtool`).
* **Define a configuration struct** for your tool in a file named `newdbtool.go`.
  Create a `Config` struct and a `Tool` struct to store necessary parameters for

@@ -117,7 +126,7 @@ tools.
* **Implement the
  [`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
  interface**. This interface requires one method:
-  * `ToolConfigKind() string`: Returns a unique string identifier for your tool
+  * `ToolConfigType() string`: Returns a unique string identifier for your tool
    (e.g., `"newdb-tool"`).
  * `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
    instance of your tool and validates that it can connect to the specified
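Similarly, a rough sketch of a hypothetical `newdb-tool` config using the renamed `ToolConfigType()` method might look like the following; the local `Source` interface, field names, and the type check are simplified stand-ins, not the real types from `internal/tools`:

```go
// Package newdbtool sketches the shape of a hypothetical "newdb-tool" tool type.
package newdbtool

import "fmt"

const toolType = "newdb-tool"

// Source stands in for the source interface the tool receives.
type Source interface{ SourceType() string }

// Config is the tool definition parsed from the tools file.
type Config struct {
	Name, Source, Description, Statement string
}

// ToolConfigType returns the unique identifier for this tool type.
func (c Config) ToolConfigType() string { return toolType }

// Initialize looks up the named source and returns a runnable Tool,
// failing fast if the source is missing or of the wrong type.
func (c Config) Initialize(srcs map[string]Source) (*Tool, error) {
	src, ok := srcs[c.Source]
	if !ok {
		return nil, fmt.Errorf("source %q not found for tool %q", c.Source, c.Name)
	}
	if src.SourceType() != "newdb" {
		return nil, fmt.Errorf("tool %q expects a newdb source, got %q", c.Name, src.SourceType())
	}
	return &Tool{Name: c.Name, Source: src, Statement: c.Statement}, nil
}

// Tool holds what is needed to execute the configured statement at runtime.
type Tool struct {
	Name, Statement string
	Source          Source
}
```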
@@ -163,6 +172,8 @@ tools.
    parameters][temp-param-doc]. Only run this test if template
    parameters apply to your tool.

* **Add additional tests** for the tools that are not covered by the predefined tests. Every tool must be tested!

* **Add the new database to the integration test workflow** in
  [integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).

@@ -232,7 +243,7 @@ resources.
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |

Pull requests should always add scope whenever possible. The scope is
-formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
+formatted as `<scope-resource>/<scope-type>` (e.g., `sources/postgres`, or
`tools/mssql-sql`).

Ideally, **each PR covers only one scope**, if this is

@@ -244,4 +255,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
  include a concise description of the changes, it's impact, along with a
  summary of the solution. If the PR is related to a specific issue, the issue
  number should be mentioned in the PR description (e.g. `Fixes #1`).
17  DEVELOPER.md

@@ -379,6 +379,23 @@ to approve PRs for main. TeamSync is used to create this team from the MDB
Group `toolbox-contributors`. Googlers who are developing for MCP-Toolbox
but aren't part of the core team should join this group.

### Issue/PR Triage and SLO

After an issue is created, maintainers will assign the following labels:

* `Priority` (defaulted to P0)
* `Type` (if applicable)
* `Product` (if applicable)

All incoming issues and PRs will follow the following SLO:

| Type | Priority | Objective |
|-----------------|----------|------------------------------------------------------------------------|
| Feature Request | P0 | Must respond within **5 days** |
| Process | P0 | Must respond within **5 days** |
| Bugs | P0 | Must respond within **5 days**, and resolve/closure within **14 days** |
| Bugs | P1 | Must respond within **7 days**, and resolve/closure within **90 days** |
| Bugs | P2 | Must respond within **30 days** |

_Types that are not listed in the table do not adhere to any SLO._

### Releasing

Toolbox has two types of releases: versioned and continuous. It uses Google
18  README.md

@@ -140,7 +140,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```

@@ -153,7 +153,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```

@@ -166,7 +166,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```

@@ -179,7 +179,7 @@ To install Toolbox as a binary:
>
> ```cmd
> :: see releases page for other versions
-> set VERSION=0.24.0
+> set VERSION=0.25.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>

@@ -191,7 +191,7 @@ To install Toolbox as a binary:
>
> ```powershell
> # see releases page for other versions
-> $VERSION = "0.24.0"
+> $VERSION = "0.25.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> ```
>

@@ -204,7 +204,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -228,7 +228,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
```
<!-- {x-release-please-end} -->

@@ -272,7 +272,7 @@ To run Toolbox from binary:
To run the server after pulling the [container image](#installing-the-server):

```sh
-export VERSION=0.11.0 # Use the version you pulled
+export VERSION=0.24.0 # Use the version you pulled
docker run -p 5000:5000 \
  -v $(pwd)/tools.yaml:/app/tools.yaml \
  us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \

@@ -954,7 +954,7 @@ For more details on configuring different types of sources, see the
### Tools

The `tools` section of a `tools.yaml` define the actions an agent can take: what
-kind of tool it is, which source(s) it affects, what parameters it uses, etc.
+type of tool it is, which source(s) it affects, what parameters it uses, etc.

```yaml
tools:
105  cmd/root.go

@@ -15,6 +15,7 @@
package cmd

import (
+	"bytes"
	"context"
	_ "embed"
	"fmt"

@@ -92,6 +93,7 @@ import (
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"

@@ -215,6 +217,8 @@ import (
	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"

@@ -263,6 +267,7 @@ import (
	_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
	_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
	_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
+	_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
	_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
	_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
	_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"

@@ -378,7 +383,9 @@ func NewCommand(opts ...Option) *Command {
	flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
	flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
	flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
+	// TODO: Insecure by default. Might consider updating this for v1.0.0
	flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
+	flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")

	// wrap RunE command so that we have access to original Command object
	cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }

@@ -419,6 +426,103 @@ func parseEnv(input string) (string, error) {
	return output, err
}

func convertToolsFile(ctx context.Context, raw []byte) ([]byte, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, err
	}

	// TODO: add embeddingmodels when available
	keysToCheck := []string{"sources", "authServices", "authSources", "tools", "prompts", "toolsets"}
	var input yaml.MapSlice
	decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
	if err := decoder.Decode(&input); err != nil {
		return nil, err
	}

	// Convert raw MapSlice to a helper map for quick lookup
	// while keeping the values as MapSlices to preserve internal order
	lookup := make(map[string]yaml.MapSlice)
	for _, item := range input {
		key := item.Key.(string)
		if slice, ok := item.Value.(yaml.MapSlice); ok {
			// convert authSources to authServices
			if key == "authSources" {
				logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
				key = "authServices"
			}
			// works even if lookup[key] is nil
			lookup[key] = append(lookup[key], slice...)
		} else {
			// toolsfile is already v2
			if key == "kind" {
				return raw, nil
			}
			return nil, fmt.Errorf("'%s' is not a map", key)
		}
	}
	// convert to tools file v2
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)
	for _, kind := range keysToCheck {
		data, exists := lookup[kind]
		if !exists {
			// if this is skipped for all keys, the tools file is in v2
			continue
		}
		// Transform each entry
		for _, entry := range data {
			entryName := entry.Key.(string)
			entryBody := ProcessValue(entry.Value, kind == "toolsets")

			transformed := yaml.MapSlice{
				{Key: "kind", Value: kind},
				{Key: "name", Value: entryName},
			}

			// Merge the transformed body into our result
			if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
				transformed = append(transformed, bodySlice...)
			} else {
				return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
			}

			if err := encoder.Encode(transformed); err != nil {
				return nil, err
			}
		}
	}
	return buf.Bytes(), nil
}

// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
func ProcessValue(v any, isToolset bool) any {
	switch val := v.(type) {
	case yaml.MapSlice:
		for i := range val {
			// Perform renaming
			if val[i].Key == "kind" {
				val[i].Key = "type"
			}
			// Recursive call for nested values (e.g., nested objects or lists)
			val[i].Value = ProcessValue(val[i].Value, false)
		}
		return val
	case []any:
		// Process lists: If it's a toolset top-level list, wrap it.
		if isToolset {
			return yaml.MapSlice{{Key: "tools", Value: val}}
		}
		// Otherwise, recurse into list items (to catch nested objects)
		for i := range val {
			val[i] = ProcessValue(val[i], false)
		}
		return val
	default:
		return val
	}
}

// parseToolsFile parses the provided yaml into appropriate configs.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
	var toolsFile ToolsFile

@@ -944,6 +1048,7 @@ func run(cmd *Command) error {

	cmd.cfg.SourceConfigs = finalToolsFile.Sources
	cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
+	cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
	cmd.cfg.ToolConfigs = finalToolsFile.Tools
	cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
	cmd.cfg.PromptConfigs = finalToolsFile.Prompts
302  cmd/root_test.go

@@ -23,12 +23,14 @@ import (
	"os"
	"path"
	"path/filepath"
	"reflect"
	"regexp"
	"runtime"
	"strings"
	"testing"
	"time"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"

	"github.com/googleapis/genai-toolbox/internal/auth/google"

@@ -67,6 +69,9 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
	if c.AllowedOrigins == nil {
		c.AllowedOrigins = []string{"*"}
	}
	if c.AllowedHosts == nil {
		c.AllowedHosts = []string{"*"}
	}
	return c
}

@@ -220,6 +225,13 @@ func TestServerConfigFlags(t *testing.T) {
				AllowedOrigins: []string{"http://foo.com", "http://bar.com"},
			}),
		},
		{
			desc: "allowed hosts",
			args: []string{"--allowed-hosts", "http://foo.com,http://bar.com"},
			want: withDefaults(server.ServerConfig{
				AllowedHosts: []string{"http://foo.com", "http://bar.com"},
			}),
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {

@@ -484,6 +496,235 @@ func TestDefaultLogLevel(t *testing.T) {
	}
}

func TestConvertToolsFile(t *testing.T) {
	ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
	defer cancelCtx()
	pr, pw := io.Pipe()
	defer pw.Close()
	defer pr.Close()

	logger, err := log.NewStdLogger(pw, pw, "DEBUG")
	if err != nil {
		t.Fatalf("failed to setup logger %s", err)
	}
	ctx = util.WithLogger(ctx, logger)

	tcs := []struct {
		desc   string
		in     string
		want   string
		isErr  bool
		errStr string
	}{
		{
			desc: "basic convert",
			in: `
sources:
  my-pg-instance:
    kind: cloud-sql-postgres
    project: my-project
    region: my-region
    instance: my-instance
    database: my_db
    user: my_user
    password: my_pass
tools:
  example_tool:
    kind: postgres-sql
    source: my-pg-instance
    description: some description
    statement: |
      SELECT * FROM SQL_STATEMENT;
    parameters:
      - name: country
        type: string
        description: some description
toolsets:
  example_toolset:
    - example_tool`,
			want: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
---
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: |
  SELECT * FROM SQL_STATEMENT;
parameters:
  - name: country
    type: string
    description: some description
---
kind: toolsets
name: example_toolset
tools:
  - example_tool`,
		},
		{
			desc: "rearrange resource order",
			in: `
tools:
  example_tool:
    kind: postgres-sql
    source: my-pg-instance
    description: some description
    statement: |
      SELECT * FROM SQL_STATEMENT;
    parameters:
      - name: country
        type: string
        description: some description
sources:
  my-pg-instance:
    kind: cloud-sql-postgres
    project: my-project
    region: my-region
    instance: my-instance
    database: my_db
    user: my_user
    password: my_pass
toolsets:
  example_toolset:
    - example_tool`,
			want: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
---
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: |
  SELECT * FROM SQL_STATEMENT;
parameters:
  - name: country
    type: string
    description: some description
---
kind: toolsets
name: example_toolset
tools:
  - example_tool`,
		},
		{
			desc: "no convertion needed",
			in: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
---
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: |
  SELECT * FROM SQL_STATEMENT;
parameters:
  - name: country
    type: string
    description: some description
---
kind: toolsets
name: example_toolset
tools:
  - example_tool`,
			want: `
kind: sources
name: my-pg-instance
type: cloud-sql-postgres
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
---
kind: tools
name: example_tool
type: postgres-sql
source: my-pg-instance
description: some description
statement: |
  SELECT * FROM SQL_STATEMENT;
parameters:
  - name: country
    type: string
    description: some description
---
kind: toolsets
name: example_toolset
tools:
  - example_tool`,
		},
		{
			desc:   "invalid source",
			in:     `sources: invalid`,
			isErr:  true,
			errStr: "'sources' is not a map",
		},
		{
			desc:   "invalid toolset",
			in:     `toolsets: invalid`,
			isErr:  true,
			errStr: "'toolsets' is not a map",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			output, err := convertToolsFile(ctx, []byte(tc.in))
			if tc.isErr {
				if err == nil {
					t.Fatalf("missing error: %s", tc.errStr)
				}
				if err.Error() != tc.errStr {
					t.Fatalf("invalid error string: got %s, want %s", err, tc.errStr)
				}
				return
			}
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}

			// ensures that the order is correct
			var doc1, doc2 yaml.MapSlice
			if err := yaml.Unmarshal(output, &doc1); err != nil {
				t.Fatalf("unable to unmarshal output: %s", string(output))
			}
			if err := yaml.Unmarshal([]byte(tc.want), &doc2); err != nil {
				t.Fatalf("unable to unmarshal output: %s", tc.want)
			}
			if !reflect.DeepEqual(doc1, doc2) {
				t.Fatalf("incorrect output: got %s, want %s", doc1, doc2)
			}
		})
	}
}

func TestParseToolFile(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {

@@ -525,7 +766,7 @@ func TestParseToolFile(t *testing.T) {
	Sources: server.SourceConfigs{
		"my-pg-instance": cloudsqlpgsrc.Config{
			Name: "my-pg-instance",
-			Kind: cloudsqlpgsrc.SourceKind,
+			Type: cloudsqlpgsrc.SourceType,
			Project: "my-project",
			Region: "my-region",
			Instance: "my-instance",

@@ -538,7 +779,7 @@ func TestParseToolFile(t *testing.T) {
	Tools: server.ToolConfigs{
		"example_tool": postgressql.Config{
			Name: "example_tool",
-			Kind: "postgres-sql",
+			Type: "postgres-sql",
			Source: "my-pg-instance",
			Description: "some description",
			Statement: "SELECT * FROM SQL_STATEMENT;\n",

@@ -679,7 +920,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
	Sources: server.SourceConfigs{
		"my-pg-instance": cloudsqlpgsrc.Config{
			Name: "my-pg-instance",
-			Kind: cloudsqlpgsrc.SourceKind,
+			Type: cloudsqlpgsrc.SourceType,
			Project: "my-project",
			Region: "my-region",
			Instance: "my-instance",

@@ -692,19 +933,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
	AuthServices: server.AuthServiceConfigs{
		"my-google-service": google.Config{
			Name: "my-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "my-client-id",
		},
		"other-google-service": google.Config{
			Name: "other-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "other-client-id",
		},
	},
	Tools: server.ToolConfigs{
		"example_tool": postgressql.Config{
			Name: "example_tool",
-			Kind: "postgres-sql",
+			Type: "postgres-sql",
			Source: "my-pg-instance",
			Description: "some description",
			Statement: "SELECT * FROM SQL_STATEMENT;\n",

@@ -779,7 +1020,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
	Sources: server.SourceConfigs{
		"my-pg-instance": cloudsqlpgsrc.Config{
			Name: "my-pg-instance",
-			Kind: cloudsqlpgsrc.SourceKind,
+			Type: cloudsqlpgsrc.SourceType,
			Project: "my-project",
			Region: "my-region",
			Instance: "my-instance",

@@ -792,19 +1033,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
	AuthSources: server.AuthServiceConfigs{
		"my-google-service": google.Config{
			Name: "my-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "my-client-id",
		},
		"other-google-service": google.Config{
			Name: "other-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "other-client-id",
		},
	},
	Tools: server.ToolConfigs{
		"example_tool": postgressql.Config{
			Name: "example_tool",
-			Kind: "postgres-sql",
+			Type: "postgres-sql",
			Source: "my-pg-instance",
			Description: "some description",
			Statement: "SELECT * FROM SQL_STATEMENT;\n",

@@ -881,7 +1122,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
	Sources: server.SourceConfigs{
		"my-pg-instance": cloudsqlpgsrc.Config{
			Name: "my-pg-instance",
-			Kind: cloudsqlpgsrc.SourceKind,
+			Type: cloudsqlpgsrc.SourceType,
			Project: "my-project",
			Region: "my-region",
			Instance: "my-instance",

@@ -894,19 +1135,19 @@ func TestParseToolFileWithAuth(t *testing.T) {
	AuthServices: server.AuthServiceConfigs{
		"my-google-service": google.Config{
			Name: "my-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "my-client-id",
		},
		"other-google-service": google.Config{
			Name: "other-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "other-client-id",
		},
	},
	Tools: server.ToolConfigs{
		"example_tool": postgressql.Config{
			Name: "example_tool",
-			Kind: "postgres-sql",
+			Type: "postgres-sql",
			Source: "my-pg-instance",
			Description: "some description",
			Statement: "SELECT * FROM SQL_STATEMENT;\n",

@@ -1052,7 +1293,7 @@ func TestEnvVarReplacement(t *testing.T) {
	Sources: server.SourceConfigs{
		"my-http-instance": httpsrc.Config{
			Name: "my-http-instance",
-			Kind: httpsrc.SourceKind,
+			Type: httpsrc.SourceType,
			BaseURL: "http://test_server/",
			Timeout: "10s",
			DefaultHeaders: map[string]string{"Authorization": "ACTUAL_HEADER"},

@@ -1062,19 +1303,19 @@ func TestEnvVarReplacement(t *testing.T) {
	AuthServices: server.AuthServiceConfigs{
		"my-google-service": google.Config{
			Name: "my-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "ACTUAL_CLIENT_ID",
		},
		"other-google-service": google.Config{
			Name: "other-google-service",
-			Kind: google.AuthServiceKind,
+			Type: google.AuthServiceType,
			ClientID: "ACTUAL_CLIENT_ID_2",
		},
	},
	Tools: server.ToolConfigs{
		"example_tool": http.Config{
			Name: "example_tool",
-			Kind: "http",
+			Type: "http",
			Source: "my-instance",
			Method: "GET",
			Path: "search?name=alice&pet=cat",

@@ -1352,6 +1593,7 @@ func TestPrebuiltTools(t *testing.T) {
	cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability")
	serverless_spark_config, _ := prebuiltconfigs.Get("serverless-spark")
	cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare")
	snowflake_config, _ := prebuiltconfigs.Get("snowflake")

	// Set environment variables
	t.Setenv("API_KEY", "your_api_key")

@@ -1449,6 +1691,14 @@ func TestPrebuiltTools(t *testing.T) {
	t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region")
	t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset")

	t.Setenv("SNOWFLAKE_ACCOUNT", "your_account")
	t.Setenv("SNOWFLAKE_USER", "your_username")
	t.Setenv("SNOWFLAKE_PASSWORD", "your_pass")
	t.Setenv("SNOWFLAKE_DATABASE", "your_db")
	t.Setenv("SNOWFLAKE_SCHEMA", "your_schema")
	t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh")
	t.Setenv("SNOWFLAKE_ROLE", "your_role")

	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)

@@ -1474,7 +1724,7 @@ func TestPrebuiltTools(t *testing.T) {
	wantToolset: server.ToolsetConfigs{
		"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
			Name: "cloud_sql_postgres_admin_tools",
-			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance"},
+			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
		},
	},
},

@@ -1484,7 +1734,7 @@ func TestPrebuiltTools(t *testing.T) {
	wantToolset: server.ToolsetConfigs{
		"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
			Name: "cloud_sql_mysql_admin_tools",
-			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
+			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
		},
	},
},

@@ -1494,7 +1744,7 @@ func TestPrebuiltTools(t *testing.T) {
	wantToolset: server.ToolsetConfigs{
		"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
			Name: "cloud_sql_mssql_admin_tools",
-			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
+			ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
		},
	},
},

@@ -1746,6 +1996,16 @@ func TestPrebuiltTools(t *testing.T) {
			},
		},
	},
	{
		name: "Snowflake prebuilt tool",
		in: snowflake_config,
		wantToolset: server.ToolsetConfigs{
			"snowflake_tools": tools.ToolsetConfig{
				Name: "snowflake_tools",
				ToolNames: []string{"execute_sql", "list_tables"},
			},
		},
	},
}

for _, tc := range tcs {
0  cmd/test.db (new file)

@@ -1 +1 @@
-0.24.0
+0.25.0
18  docs/en/blogs/_index.md (new file)

@@ -0,0 +1,18 @@
---
title: "Featured Articles"
weight: 3
description: Toolbox Medium Blogs
manualLink: "https://medium.com/@mcp_toolbox"
manualLinkTarget: _blank
---

<html>
<head>
<title>Redirecting to Featured Articles</title>
<link rel="canonical" href="https://medium.com/@mcp_toolbox"/>
<meta http-equiv="refresh" content="0;url=https://medium.com/@mcp_toolbox"/>
</head>
<body>
<p>If you are not automatically redirected, please <a href="https://medium.com/@mcp_toolbox">follow this link to our articles</a>.</p>
</body>
</html>
@@ -234,7 +234,7 @@
   },
   "outputs": [],
   "source": [
-    "version = \"0.24.0\" # x-release-please-version\n",
+    "version = \"0.25.0\" # x-release-please-version\n",
    "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
    "\n",
    "# Make the binary executable\n",
@@ -103,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):

```sh
# see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```

@@ -114,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

```sh
# see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```

@@ -125,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):

```sh
# see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```

@@ -136,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):

```cmd
:: see releases page for other versions
-set VERSION=0.24.0
+set VERSION=0.25.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```

@@ -146,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):

```powershell
# see releases page for other versions
-$VERSION = "0.24.0"
+$VERSION = "0.25.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```

@@ -158,7 +158,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -177,7 +177,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
```

{{% /tab %}}
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -569,11 +569,12 @@
      }
    },
    "node_modules/jws": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-      "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+      "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
      "license": "MIT",
      "dependencies": {
-        "jwa": "^2.0.0",
+        "jwa": "^2.0.1",
        "safe-buffer": "^5.0.1"
      }
    },

@@ -882,11 +882,12 @@
      }
    },
    "node_modules/jws": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-      "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+      "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
      "license": "MIT",
      "dependencies": {
-        "jwa": "^2.0.0",
+        "jwa": "^2.0.1",
        "safe-buffer": "^5.0.1"
      }
    },
@@ -1,3 +1,3 @@
-google-genai==1.56.0
+google-genai==1.57.0
toolbox-core==0.5.4
pytest==9.0.2

@@ -1,5 +1,5 @@
-langchain==1.2.0
-langchain-google-vertexai==3.2.0
+langchain==1.2.2
+langchain-google-vertexai==3.2.1
langgraph==1.0.5
toolbox-langchain==0.5.4
pytest==9.0.2
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -48,6 +48,7 @@ instance, database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`

@@ -299,6 +300,7 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,6 +48,7 @@ database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`

@@ -299,6 +300,7 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,6 +48,7 @@ instance, database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`

@@ -299,6 +300,7 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a

{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -68,7 +68,12 @@ networks:
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
|
||||
list of hosts for validation. E.g. `command: [ "toolbox",
|
||||
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
|
||||
"--allowed-hosts", "localhost:5000"]`
|
||||
|
||||
To implement CORS, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `command: [ "toolbox",
|
||||
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
|
||||
"--allowed-origins", "https://foo.bar"]`
|
||||
|
||||
@@ -188,9 +188,13 @@ description: >
|
||||
path: tools.yaml
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
list of hosts permitted to access the server. E.g. `args: ["--address",
"0.0.0.0", "--allowed-hosts", "foo.bar:5000"]`
|
||||
|
||||
To implement CORS, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `args: ["--address",
|
||||
"0.0.0.0", "--allowed-origins", "https://foo.bar"]`
|
||||
{{< /notice >}}
|
||||
|
||||
|
||||
@@ -142,14 +142,18 @@ deployment will time out.
|
||||
|
||||
### Update deployed server to be secure
|
||||
|
||||
To prevent DNS rebinding attacks, use the `--allowed-origins` flag to specify a
list of origins permitted to access the server. In order to do that, you will
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
|
||||
list of hosts. In order to do that, you will
|
||||
have to re-deploy the cloud run service with the new flag.
|
||||
|
||||
To implement CORS checks, use the `--allowed-origins` flag to specify a list of
|
||||
origins permitted to access the server.
|
||||
|
||||
1. Set an environment variable to the cloud run url:
|
||||
|
||||
```bash
|
||||
export URL=<cloud run url>
|
||||
export HOST=<cloud run host>
|
||||
```
|
||||
|
||||
2. Redeploy Toolbox:
|
||||
@@ -160,7 +164,7 @@ have to re-deploy the cloud run service with the new flag.
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL"
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST"
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
```
|
||||
|
||||
@@ -172,7 +176,7 @@ have to re-deploy the cloud run service with the new flag.
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST" \
|
||||
# TODO(dev): update the following to match your VPC if necessary
|
||||
--network default \
|
||||
--subnet default
|
||||
|
||||
@@ -8,25 +8,26 @@ description: >
|
||||
|
||||
## Reference
|
||||
|
||||
| Flag (Short) | Flag (Long) | Description | Default |
|
||||
|--------------|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
|
||||
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
|
||||
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
|
||||
| `-h` | `--help` | help for toolbox | |
|
||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
|
||||
| Flag (Short) | Flag (Long) | Description | Default |
|
||||
|--------------|----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
|
||||
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
|
||||
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
|
||||
| `-h` | `--help` | help for toolbox | |
|
||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
|
||||
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
|
||||
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
|
||||
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
|
||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
|
||||
| `-v` | `--version` | version for toolbox | |
|
||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
|
||||
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
||||
| `-v` | `--version` | version for toolbox | |
|
||||
|
||||
## Examples
|
||||
|
||||
|
||||
@@ -187,6 +187,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -203,6 +204,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Cloud SQL for PostgreSQL
|
||||
|
||||
@@ -275,6 +277,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -290,6 +293,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Cloud SQL for SQL Server
|
||||
|
||||
@@ -336,6 +340,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -351,6 +356,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Dataplex
|
||||
|
||||
|
||||
@@ -134,6 +134,7 @@ sources:
|
||||
# scopes: # Optional: List of OAuth scopes to request.
|
||||
# - "https://www.googleapis.com/auth/bigquery"
|
||||
# - "https://www.googleapis.com/auth/drive.readonly"
|
||||
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
|
||||
```
|
||||
|
||||
Initialize a BigQuery source that uses the client's access token:
|
||||
@@ -153,6 +154,7 @@ sources:
|
||||
# scopes: # Optional: List of OAuth scopes to request.
|
||||
# - "https://www.googleapis.com/auth/bigquery"
|
||||
# - "https://www.googleapis.com/auth/drive.readonly"
|
||||
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
|
||||
```
|
||||
|
||||
## Reference
|
||||
@@ -167,3 +169,4 @@ sources:
|
||||
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
|
||||
| scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
|
||||
| impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |
|
||||
| maxQueryResultRows | int | false | The maximum number of rows to return from a query. Defaults to 50. |
|
||||
|
||||
docs/en/resources/sources/snowflake.md (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
---
|
||||
title: "Snowflake"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Snowflake is a cloud-based data platform.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[Snowflake][sf-docs] is a cloud data platform that provides a data warehouse-as-a-service designed for the cloud.
|
||||
|
||||
[sf-docs]: https://docs.snowflake.com/
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`snowflake-sql`](../tools/snowflake/snowflake-sql.md)
  Execute pre-defined, parameterized SQL statements in Snowflake.

- [`snowflake-execute-sql`](../tools/snowflake/snowflake-execute-sql.md)
  Execute ad-hoc SQL statements provided at run time in Snowflake.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
|
||||
This source supports standard username and password authentication only. You
will need to create a Snowflake user to log in to the database with.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-sf-source:
|
||||
kind: snowflake
|
||||
account: ${SNOWFLAKE_ACCOUNT}
|
||||
user: ${SNOWFLAKE_USER}
|
||||
password: ${SNOWFLAKE_PASSWORD}
|
||||
database: ${SNOWFLAKE_DATABASE}
|
||||
schema: ${SNOWFLAKE_SCHEMA}
|
||||
warehouse: ${SNOWFLAKE_WAREHOUSE}
|
||||
role: ${SNOWFLAKE_ROLE}
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "snowflake". |
|
||||
| account | string | true | Your Snowflake account identifier. |
|
||||
| user | string | true | Name of the Snowflake user to connect as (e.g. "my-sf-user"). |
|
||||
| password | string | true | Password of the Snowflake user (e.g. "my-password"). |
|
||||
| database | string | true | Name of the Snowflake database to connect to (e.g. "my_db"). |
|
||||
| schema | string | true | Name of the schema to use (e.g. "my_schema"). |
|
||||
| warehouse | string | false | The virtual warehouse to use. Defaults to "COMPUTE_WH". |
|
||||
| role | string | false | The security role to use. Defaults to "ACCOUNTADMIN". |
|
||||
| timeout | integer | false | The connection timeout in seconds. Defaults to 60. |
|
||||
@@ -50,16 +50,19 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------------|:--------:|:------------:|------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "trino". |
|
||||
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
|
||||
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
|
||||
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
|
||||
| password | string | false | Password for basic authentication |
|
||||
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
|
||||
| schema | string | true | Default schema to use for queries (e.g. "default") |
|
||||
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
|
||||
| accessToken | string | false | JWT access token for authentication |
|
||||
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
|
||||
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ---------------------- | :------: | :----------: | ---------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "trino". |
|
||||
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
|
||||
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
|
||||
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
|
||||
| password | string | false | Password for basic authentication |
|
||||
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
|
||||
| schema | string | true | Default schema to use for queries (e.g. "default") |
|
||||
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
|
||||
| accessToken | string | false | JWT access token for authentication |
|
||||
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
|
||||
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
|
||||
| disableSslVerification | boolean | false | Skip SSL/TLS certificate verification (default: false) |
|
||||
| sslCertPath | string | false | Path to a custom SSL/TLS certificate file |
|
||||
| sslCert | string | false | Custom SSL/TLS certificate content |
|
||||
|
||||
@@ -91,8 +91,8 @@ visible to the LLM.
|
||||
https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview
|
||||
|
||||
{{< notice tip >}} Make sure to enable the `parameterized_views` extension
|
||||
before running this tool. You can do so by running this command in the AlloyDB
|
||||
studio:
|
||||
to utilize the PSV feature (`nlConfigParameters`) with this tool. You can do so by
|
||||
running this command in the AlloyDB studio:
|
||||
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS parameterized_views;
|
||||
|
||||
docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
---
|
||||
title: cloud-sql-create-backup
|
||||
type: docs
|
||||
weight: 10
|
||||
description: "Creates a backup on a Cloud SQL instance."
|
||||
---
|
||||
|
||||
The `cloud-sql-create-backup` tool creates an on-demand backup on a Cloud SQL instance using the Cloud SQL Admin API.
|
||||
|
||||
{{< notice info >}}
|
||||
This tool uses a `source` of kind `cloud-sql-admin`.
|
||||
{{< /notice >}}
|
||||
|
||||
## Examples
|
||||
|
||||
Basic backup creation (backs up the current state of the instance):
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
backup-creation-basic:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
description: "Creates a backup on the given Cloud SQL instance."
|
||||
```
|
||||
## Reference
|
||||
### Tool Configuration
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| -------------- | :------: | :----------: | ------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "cloud-sql-create-backup". |
|
||||
| source | string | true | The name of the `cloud-sql-admin` source to use. |
|
||||
| description | string | false | A description of the tool. |
|
||||
|
||||
### Tool Inputs
|
||||
|
||||
| **parameter** | **type** | **required** | **description** |
|
||||
| -------------------------- | :------: | :----------: | ------------------------------------------------------------------------------- |
|
||||
| project | string | true | The project ID. |
|
||||
| instance | string | true | The name of the instance to take a backup on. Does not include the project ID. |
|
||||
| location | string | false | (Optional) Location of the backup run. |
|
||||
| backup_description | string | false | (Optional) The description of this backup run. |
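For illustration, the tool configured above could be invoked from the toolbox-core Python SDK as sketched below. This is a minimal sketch, not part of the official reference: the tool name and server URL come from the example configuration, and the project and instance values are hypothetical placeholders.

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes a Toolbox server running locally with the configuration above.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        backup_tool = await toolbox.load_tool("backup-creation-basic")
        # `project` and `instance` are required; `location` and
        # `backup_description` are optional.
        result = await backup_tool(
            project="my-project",        # hypothetical project ID
            instance="my-instance",      # hypothetical instance name
            backup_description="Pre-migration backup",
        )
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
```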
|
||||
|
||||
## See Also
|
||||
- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
|
||||
- [Toolbox Cloud SQL tools documentation](../cloudsql)
|
||||
- [Cloud SQL Backup API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/backups)
|
||||
docs/en/resources/tools/snowflake/_index.md (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Snowflake"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Snowflake Sources.
|
||||
---
|
||||
docs/en/resources/tools/snowflake/snowflake-execute-sql.md (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
---
|
||||
title: "snowflake-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "snowflake-execute-sql" tool executes a SQL statement against a Snowflake
|
||||
database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `snowflake-execute-sql` tool executes a SQL statement against a Snowflake
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [snowflake](../../sources/snowflake.md)
|
||||
|
||||
`snowflake-execute-sql` takes one input parameter, `sql`, and runs the SQL
statement against the `source`.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with
|
||||
> human-in-the-loop and shouldn't be used for production agents.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
execute_sql_tool:
|
||||
kind: snowflake-execute-sql
|
||||
source: my-snowflake-instance
|
||||
description: Use this tool to execute SQL statements.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------|:-------------:|:------------:|-----------------------------------------------------------|
|
||||
| kind | string | true | Must be "snowflake-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
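For illustration, the tool configured above can be invoked with a single `sql` argument. The following is a minimal sketch, assuming the toolbox-core Python SDK and a Toolbox server running locally with the example configuration; the query shown is arbitrary.

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes a local Toolbox server exposing the `execute_sql_tool` above.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        execute_sql = await toolbox.load_tool("execute_sql_tool")
        # The single `sql` parameter carries the statement to run.
        result = await execute_sql(sql="SELECT CURRENT_VERSION()")
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
```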
|
||||
docs/en/resources/tools/snowflake/snowflake-sql.md (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
---
|
||||
title: "snowflake-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "snowflake-sql" tool executes a pre-defined SQL statement against a
|
||||
Snowflake database.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `snowflake-sql` tool executes a pre-defined SQL statement against a Snowflake
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [snowflake](../../sources/snowflake.md)
|
||||
|
||||
The specified SQL statement is executed as a prepared statement, and specified
|
||||
parameters will be inserted according to their position: e.g. `:1` will be the
|
||||
first parameter specified, `:2` will be the second parameter, and so on.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: snowflake-sql
|
||||
source: my-snowflake-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = :1
|
||||
AND flight_number = :2
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code consists of a two-character airline designator followed by a
flight number, which is a 1 to 4 digit number.
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
Another example is DL 1234: the airline is "DL", and flight_number is "1234".
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
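For illustration, a client supplies these parameters by name and they are bound to the positional placeholders in the order they are declared. The following is a minimal sketch, assuming the toolbox-core Python SDK and a local Toolbox server that serves the tool above; the flight values are hypothetical.

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes a local Toolbox server exposing `search_flights_by_number`.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        search_flights = await toolbox.load_tool("search_flights_by_number")
        # `airline` binds to :1 and `flight_number` binds to :2.
        result = await search_flights(airline="CY", flight_number="888")
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
```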
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](..#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: snowflake-sql
|
||||
source: my-snowflake-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "snowflake-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](..#template-parameters) | false | List of [templateParameters](..#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
|
||||
@@ -16,11 +16,7 @@ database. It's compatible with any of the following sources:
|
||||
|
||||
- [trino](../../sources/trino.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][trino-prepare],
|
||||
and specified parameters will be inserted according to their position: e.g. `$1`
|
||||
will be the first parameter specified, `$2` will be the second parameter, and so
|
||||
on. If template parameters are included, they will be resolved before execution
|
||||
of the prepared statement.
|
||||
The specified SQL statement is executed as a [prepared statement][trino-prepare], and expects parameters in the SQL query to be in the form of placeholders `?`.
|
||||
|
||||
[trino-prepare]: https://trino.io/docs/current/sql/prepare.html
|
||||
|
||||
@@ -38,8 +34,8 @@ tools:
|
||||
source: my-trino-instance
|
||||
statement: |
|
||||
SELECT * FROM hive.sales.orders
|
||||
WHERE region = $1
|
||||
AND order_date >= DATE($2)
|
||||
WHERE region = ?
|
||||
AND order_date >= DATE(?)
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for orders in a specific region.
|
||||
|
||||
@@ -771,7 +771,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.24.0\" # x-release-please-version\n",
|
||||
"version = \"0.25.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.24.0"
|
||||
export VERSION="0.25.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.24.0\" # x-release-please-version\n",
|
||||
"version = \"0.25.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
docs/en/samples/neo4j/_index.md (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Neo4j"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to get started with Toolbox using Neo4j.
|
||||
---
|
||||
docs/en/samples/neo4j/mcp_quickstart.md (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
---
|
||||
title: "Quickstart (MCP with Neo4j)"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to get started running Toolbox with MCP Inspector and Neo4j as the source.
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol that standardizes how applications provide context to LLMs. Check out this page on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).
|
||||
|
||||
|
||||
## Step 1: Set up your Neo4j Database and Data
|
||||
|
||||
In this section, you'll set up a database and populate it with sample data for a movies-related agent. This guide assumes you have a running Neo4j instance, either locally or in the cloud.
|
||||
|
||||
1. **Populate the database with data.**
   To make this quickstart straightforward, we'll use the built-in Movies dataset available in Neo4j.

   In your Neo4j Browser, run the following command to create and populate the database:

   ```cypher
   :play movies
   ```

   Follow the instructions to load the data. This will create a graph with `Movie`, `Person`, and `Actor` nodes and their relationships.
|
||||
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
In this section, we will install the MCP Toolbox, configure our tools in a `tools.yaml` file, and then run the Toolbox server.
|
||||
|
||||
1. **Install the Toolbox binary.**
   The simplest way to get started is to download the latest binary for your operating system.

   Download the latest version of Toolbox as a binary:

   ```bash
   export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
   curl -O https://storage.googleapis.com/genai-toolbox/v0.16.0/$OS/toolbox
   ```

   Make the binary executable:

   ```bash
   chmod +x toolbox
   ```

1. **Create the `tools.yaml` file.**
   This file defines your Neo4j source and the specific tools that will be exposed to your AI agent.

   {{< notice tip >}}
   Authentication for the Neo4j source uses standard username and password fields. For production use, it is highly recommended to use environment variables for sensitive information like passwords.
   {{< /notice >}}

   Write the following into a `tools.yaml` file:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-neo4j-source:
|
||||
kind: neo4j
|
||||
uri: bolt://localhost:7687
|
||||
user: neo4j
|
||||
password: my-password # Replace with your actual password
|
||||
|
||||
tools:
|
||||
search-movies-by-actor:
|
||||
kind: neo4j-cypher
|
||||
source: my-neo4j-source
|
||||
description: "Searches for movies an actor has appeared in based on their name. Useful for questions like 'What movies has Tom Hanks been in?'"
|
||||
parameters:
|
||||
- name: actor_name
|
||||
type: string
|
||||
description: The full name of the actor to search for.
|
||||
statement: |
|
||||
MATCH (p:Person {name: $actor_name}) -[:ACTED_IN]-> (m:Movie)
|
||||
RETURN m.title AS title, m.year AS year, m.genre AS genre
|
||||
|
||||
get-actor-for-movie:
|
||||
kind: neo4j-cypher
|
||||
source: my-neo4j-source
|
||||
description: "Finds the actors who starred in a specific movie. Useful for questions like 'Who acted in Inception?'"
|
||||
parameters:
|
||||
- name: movie_title
|
||||
type: string
|
||||
description: The exact title of the movie.
|
||||
statement: |
|
||||
MATCH (p:Person) -[:ACTED_IN]-> (m:Movie {title: $movie_title})
|
||||
RETURN p.name AS actor
|
||||
```
|
||||
|
||||
1. **Start the Toolbox server.**
   Run the Toolbox server, pointing to the `tools.yaml` file you created earlier.
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
## Step 3: Connect to MCP Inspector
|
||||
|
||||
1. **Run the MCP Inspector:**
|
||||
|
||||
```bash
|
||||
npx @modelcontextprotocol/inspector
|
||||
```
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
|
||||
|
||||
```bash
|
||||
Starting MCP inspector...
|
||||
⚙️ Proxy server listening on localhost:6277
|
||||
🔑 Session token: <YOUR_SESSION_TOKEN>
|
||||
Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth
|
||||
|
||||
🚀 MCP Inspector is up and running at:
|
||||
http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
|
||||
```
|
||||
|
||||
1. Open the above link in your browser.
|
||||
|
||||
1. For `Transport Type`, select `Streamable HTTP`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
|
||||
|
||||
1. Click `Connect`.
|
||||
|
||||
1. Select `List Tools`, you will see a list of tools configured in `tools.yaml`.
|
||||
|
||||
1. Test out your tools here!
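Beyond the Inspector, you can also call these tools programmatically. The snippet below is a minimal sketch, assuming the toolbox-core Python SDK is installed and the Toolbox server from this guide is running locally; the tool and parameter names come from the `tools.yaml` above, and the actor and movie values are just examples from the Movies dataset.

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes the Toolbox server from this guide is running locally.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        movies_by_actor = await toolbox.load_tool("search-movies-by-actor")
        print(await movies_by_actor(actor_name="Tom Hanks"))

        actors_for_movie = await toolbox.load_tool("get-actor-for-movie")
        print(await actors_for_movie(movie_title="Cloud Atlas"))


if __name__ == "__main__":
    asyncio.run(main())
```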
|
||||
|
||||
docs/en/samples/snowflake/_index.md (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
---
|
||||
title: "Snowflake"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox with MCP Inspector and Snowflake as the source.
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
|
||||
that standardizes how applications provide context to LLMs. Check out this page
|
||||
on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. [Create a Snowflake account](https://signup.snowflake.com/).
|
||||
1. Connect to the instance using [SnowSQL](https://docs.snowflake.com/en/user-guide/snowsql), or any other Snowflake client.
|
||||
|
||||
## Step 1: Set up your environment
|
||||
|
||||
Copy the environment template and update it with your Snowflake credentials:
|
||||
|
||||
```bash
|
||||
cp docs/en/samples/snowflake/snowflake-env.sh my-snowflake-env.sh
|
||||
```
|
||||
|
||||
Edit `my-snowflake-env.sh` with your actual Snowflake connection details:
|
||||
|
||||
```bash
|
||||
export SNOWFLAKE_ACCOUNT="your-account-identifier"
|
||||
export SNOWFLAKE_USER="your-username"
|
||||
export SNOWFLAKE_PASSWORD="your-password"
|
||||
export SNOWFLAKE_DATABASE="your-database"
|
||||
export SNOWFLAKE_SCHEMA="your-schema"
|
||||
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
|
||||
export SNOWFLAKE_ROLE="ACCOUNTADMIN"
|
||||
```
|
||||
|
||||
## Step 2: Install Toolbox
|
||||
|
||||
In this section, we will download and install the Toolbox binary.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.10.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
## Step 3: Configure the tools
|
||||
|
||||
You have two options:
|
||||
|
||||
#### Option A: Use the prebuilt configuration
|
||||
|
||||
```bash
|
||||
./toolbox --prebuilt snowflake
|
||||
```
|
||||
|
||||
#### Option B: Use the custom configuration
|
||||
|
||||
Create a `tools.yaml` file and add the following content. You must replace the placeholders with your actual Snowflake configuration.
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
snowflake-source:
|
||||
kind: snowflake
|
||||
account: ${SNOWFLAKE_ACCOUNT}
|
||||
user: ${SNOWFLAKE_USER}
|
||||
password: ${SNOWFLAKE_PASSWORD}
|
||||
database: ${SNOWFLAKE_DATABASE}
|
||||
schema: ${SNOWFLAKE_SCHEMA}
|
||||
warehouse: ${SNOWFLAKE_WAREHOUSE}
|
||||
role: ${SNOWFLAKE_ROLE}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: snowflake-execute-sql
|
||||
source: snowflake-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: snowflake-sql
|
||||
source: snowflake-source
|
||||
description: "Lists detailed schema information for user-created tables."
|
||||
statement: |
|
||||
SELECT table_name, table_type
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = current_schema()
|
||||
ORDER BY table_name;
|
||||
```
|
||||
|
||||
For more info on tools, check out the
|
||||
[Tools](../../resources/tools/) section.
|
||||
|
||||
## Step 4: Run the Toolbox server
|
||||
|
||||
Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
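Optionally, once the server is running you can exercise the tools from Python. This is a minimal sketch, assuming the toolbox-core SDK is installed and you used the custom `tools.yaml` from Option B above; with the prebuilt configuration the available tool names and parameters may differ.

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes the Toolbox server started above is listening locally.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        execute_sql = await toolbox.load_tool("execute_sql")
        print(await execute_sql(sql="SELECT CURRENT_VERSION()"))

        list_tables = await toolbox.load_tool("list_tables")
        print(await list_tables())  # takes no parameters in this configuration


if __name__ == "__main__":
    asyncio.run(main())
```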
|
||||
|
||||
## Step 5: Connect to MCP Inspector
|
||||
|
||||
1. Run the MCP Inspector:
|
||||
|
||||
```bash
|
||||
npx @modelcontextprotocol/inspector
|
||||
```
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
|
||||
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
|
||||
|
||||
```bash
|
||||
Starting MCP inspector...
|
||||
⚙️ Proxy server listening on localhost:6277
|
||||
🔑 Session token: <YOUR_SESSION_TOKEN>
|
||||
Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth
|
||||
|
||||
🚀 MCP Inspector is up and running at:
|
||||
http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
|
||||
```
|
||||
|
||||
1. Open the above link in your browser.
|
||||
|
||||
1. For `Transport Type`, select `Streamable HTTP`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
|
||||
|
||||
1. Click Connect.
|
||||
|
||||
1. Select `List Tools`, you will see a list of tools configured in `tools.yaml`.
|
||||
|
||||
1. Test out your tools here!
|
||||
|
||||
## What's next
|
||||
|
||||
- Learn more about [MCP Inspector](../../how-to/connect_via_mcp.md).
|
||||
- Learn more about [Toolbox Resources](../../resources/).
|
||||
- Learn more about [Toolbox How-to guides](../../how-to/).
|
||||
docs/en/samples/snowflake/runme.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
import asyncio
|
||||
from toolbox_core import ToolboxClient
|
||||
|
||||
|
||||
async def main():
|
||||
# Replace with the actual URL where your Toolbox service is running
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
|
||||
tool = await toolbox.load_tool("execute_sql")
|
||||
result = await tool("SELECT 1")
|
||||
print(result)
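# NOTE: The call below assumes a `list_tables` tool that accepts a
# `table_names` parameter (for example, from the prebuilt `snowflake`
# configuration). If your `list_tables` tool takes no parameters, call it
# as `await tool()` instead.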
|
||||
|
||||
tool = await toolbox.load_tool("list_tables")
|
||||
result = await tool(table_names="DIM_DATE")
|
||||
print(result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
docs/en/samples/snowflake/snowflake-config.yaml (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
sources:
|
||||
my-snowflake-db:
|
||||
kind: snowflake
|
||||
account: ${SNOWFLAKE_ACCOUNT}
|
||||
user: ${SNOWFLAKE_USER}
|
||||
password: ${SNOWFLAKE_PASSWORD}
|
||||
database: ${SNOWFLAKE_DATABASE}
|
||||
schema: ${SNOWFLAKE_SCHEMA}
|
||||
warehouse: ${SNOWFLAKE_WAREHOUSE} # Optional, defaults to COMPUTE_WH if not set
|
||||
role: ${SNOWFLAKE_ROLE} # Optional, defaults to ACCOUNTADMIN if not set
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: snowflake-execute-sql
|
||||
source: my-snowflake-db
|
||||
description: Execute arbitrary SQL statements on Snowflake
|
||||
|
||||
get_customer_orders:
|
||||
kind: snowflake-sql
|
||||
source: my-snowflake-db
|
||||
description: Get orders for a specific customer
|
||||
statement: |
|
||||
SELECT o.order_id, o.order_date, o.total_amount, o.status
|
||||
FROM orders o
|
||||
WHERE o.customer_id = :1
|
||||
ORDER BY o.order_date DESC
|
||||
parameters:
|
||||
- name: customer_id
|
||||
type: string
|
||||
description: The customer ID to look up orders for
|
||||
|
||||
daily_sales_report:
|
||||
kind: snowflake-sql
|
||||
source: my-snowflake-db
|
||||
description: Generate daily sales report for a specific date
|
||||
statement: |
|
||||
SELECT
|
||||
DATE(order_date) as sales_date,
|
||||
COUNT(*) as total_orders,
|
||||
SUM(total_amount) as total_revenue,
|
||||
AVG(total_amount) as avg_order_value
|
||||
FROM orders
|
||||
WHERE DATE(order_date) = :1
|
||||
GROUP BY DATE(order_date)
|
||||
parameters:
|
||||
- name: report_date
|
||||
type: string
|
||||
description: The date to generate report for (YYYY-MM-DD format)
|
||||
|
||||
toolsets:
|
||||
snowflake-analytics:
|
||||
- execute_sql
|
||||
- get_customer_orders
|
||||
- daily_sales_report
|
||||
docs/en/samples/snowflake/snowflake-env.sh (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Snowflake Connection Configuration
|
||||
# Copy this file to snowflake-env.sh and update with your actual values
|
||||
# Then source it before running the toolbox: source snowflake-env.sh
|
||||
|
||||
# Required environment variables
|
||||
export SNOWFLAKE_ACCOUNT="your-account-identifier" # e.g., "xy12345.snowflakecomputing.com"
|
||||
export SNOWFLAKE_USER="your-username" # Your Snowflake username
|
||||
export SNOWFLAKE_PASSWORD="your-password" # Your Snowflake password
|
||||
export SNOWFLAKE_DATABASE="your-database" # Database name
|
||||
export SNOWFLAKE_SCHEMA="your-schema" # Schema name (usually "PUBLIC")
|
||||
|
||||
# Optional environment variables (will use defaults if not set)
|
||||
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH" # Warehouse name (default: COMPUTE_WH)
|
||||
export SNOWFLAKE_ROLE="ACCOUNTADMIN" # Role name (default: ACCOUNTADMIN)
|
||||
|
||||
echo "Snowflake environment variables have been set!"
|
||||
echo "Account: $SNOWFLAKE_ACCOUNT"
|
||||
echo "User: $SNOWFLAKE_USER"
|
||||
echo "Database: $SNOWFLAKE_DATABASE"
|
||||
echo "Schema: $SNOWFLAKE_SCHEMA"
|
||||
echo "Warehouse: $SNOWFLAKE_WAREHOUSE"
|
||||
echo "Role: $SNOWFLAKE_ROLE"
|
||||
echo ""
|
||||
echo "You can now run the toolbox with:"
|
||||
echo " ./toolbox --prebuilt snowflake # Use prebuilt configuration"
|
||||
echo " ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml # Use custom configuration"
|
||||
docs/en/samples/snowflake/test-snowflake.sh (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Test script to demonstrate Snowflake configuration with environment variables
|
||||
# This script shows how to set up and test the Snowflake toolbox configuration
|
||||
|
||||
echo "=== Testing Snowflake Configuration ==="
|
||||
echo ""
|
||||
|
||||
# Set up test environment variables (replace with your actual values)
|
||||
echo "Setting up test environment variables..."
|
||||
export SNOWFLAKE_ACCOUNT="test-account"
|
||||
export SNOWFLAKE_USER="test-user"
|
||||
export SNOWFLAKE_PASSWORD="test-password"
|
||||
export SNOWFLAKE_DATABASE="test-database"
|
||||
export SNOWFLAKE_SCHEMA="test-schema"
|
||||
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
|
||||
export SNOWFLAKE_ROLE="ACCOUNTADMIN"
|
||||
|
||||
echo "Environment variables set:"
|
||||
echo " SNOWFLAKE_ACCOUNT: $SNOWFLAKE_ACCOUNT"
|
||||
echo " SNOWFLAKE_USER: $SNOWFLAKE_USER"
|
||||
echo " SNOWFLAKE_DATABASE: $SNOWFLAKE_DATABASE"
|
||||
echo " SNOWFLAKE_SCHEMA: $SNOWFLAKE_SCHEMA"
|
||||
echo " SNOWFLAKE_WAREHOUSE: $SNOWFLAKE_WAREHOUSE"
|
||||
echo " SNOWFLAKE_ROLE: $SNOWFLAKE_ROLE"
|
||||
echo ""
|
||||
|
||||
echo "=== Testing Prebuilt Configuration ==="
|
||||
echo "This will attempt to initialize with the prebuilt Snowflake configuration:"
|
||||
echo "Command: ./toolbox --prebuilt snowflake --stdio"
|
||||
echo ""
|
||||
echo "Expected result: Connection failure due to test credentials (this is normal)"
|
||||
echo ""
|
||||
|
||||
# Test the prebuilt configuration (this will fail with test credentials, which is expected)
|
||||
timeout 5s ./toolbox --prebuilt snowflake --stdio 2>&1 | head -5
|
||||
|
||||
echo ""
|
||||
echo "=== Testing Custom Configuration ==="
|
||||
echo "This will attempt to initialize with the custom Snowflake configuration:"
|
||||
echo "Command: ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio"
|
||||
echo ""
|
||||
echo "Expected result: Connection failure due to test credentials (this is normal)"
|
||||
echo ""
|
||||
|
||||
# Test the custom configuration (this will fail with test credentials, which is expected)
|
||||
timeout 5s ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio 2>&1 | head -5
|
||||
|
||||
echo ""
|
||||
echo "=== Instructions for Real Usage ==="
|
||||
echo "1. Copy docs/en/samples/snowflake/snowflake-env.sh to your own file"
|
||||
echo "2. Edit it with your actual Snowflake credentials"
|
||||
echo "3. Source the file: source your-snowflake-env.sh"
|
||||
echo "4. Run: ./toolbox --prebuilt snowflake"
|
||||
echo ""
|
||||
echo "For more information, see docs/en/samples/snowflake"
|
||||
@@ -1,25 +0,0 @@
|
||||
---
|
||||
title: "JS SDK"
|
||||
type: docs
|
||||
weight: 7
|
||||
description: >
|
||||
JS SDKs to connect to the MCP Toolbox server.
|
||||
---
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
The MCP Toolbox service provides a centralized way to manage and expose tools
|
||||
(like API connectors, database query tools, etc.) for use by GenAI applications.
|
||||
|
||||
These JS SDKs act as clients for that service. They handle the communication needed to:
|
||||
|
||||
* Fetch tool definitions from your running Toolbox instance.
|
||||
* Provide convenient JS objects or functions representing those tools.
|
||||
* Invoke the tools (calling the underlying APIs/services configured in Toolbox).
|
||||
* Handle authentication and parameter binding as needed.
|
||||
|
||||
By using these SDKs, you can easily leverage your Toolbox-managed tools directly
|
||||
within your JS applications or AI orchestration frameworks.
|
||||
|
||||
[Github](https://github.com/googleapis/mcp-toolbox-sdk-js)
|
||||
docs/en/sdks/go-sdk.md (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
---
|
||||
title: "Go SDK"
|
||||
weight: 2
|
||||
description: Go lang client SDK
|
||||
icon: fa-brands fa-golang
|
||||
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-go"
|
||||
manualLinkTarget: _blank
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-go"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://github.com/googleapis/mcp-toolbox-sdk-go"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -1,25 +0,0 @@
|
||||
---
|
||||
title: "Go SDK"
|
||||
type: docs
|
||||
weight: 7
|
||||
description: >
|
||||
Go SDKs to connect to the MCP Toolbox server.
|
||||
---
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
The MCP Toolbox service provides a centralized way to manage and expose tools
|
||||
(like API connectors, database query tools, etc.) for use by GenAI applications.
|
||||
|
||||
The Go SDK acts as a client for that service. It handles the communication needed to:
|
||||
|
||||
* Fetch tool definitions from your running Toolbox instance.
|
||||
* Provide convenient Go structs representing those tools.
|
||||
* Invoke the tools (calling the underlying APIs/services configured in Toolbox).
|
||||
* Handle authentication and parameter binding as needed.
|
||||
|
||||
By using the SDK, you can easily leverage your Toolbox-managed tools directly
|
||||
within your Go applications or AI orchestration frameworks.
|
||||
|
||||
[Github](https://github.com/googleapis/mcp-toolbox-sdk-go)
|
||||
docs/en/sdks/js-sdk.md (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
---
|
||||
title: "JS SDK"
|
||||
weight: 2
|
||||
description: Javascript client SDK
|
||||
icon: fa-brands fa-node-js
|
||||
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-js"
|
||||
manualLinkTarget: _blank
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-js"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://github.com/googleapis/mcp-toolbox-sdk-js"/>
|
||||
</head>
|
||||
</html>
|
||||
docs/en/sdks/python-sdk.md (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
---
|
||||
title: "Python SDK"
|
||||
weight: 2
|
||||
description: Python client SDK
|
||||
icon: fa-brands fa-python
|
||||
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-python"
|
||||
manualLinkTarget: _blank
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-python"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://github.com/googleapis/mcp-toolbox-sdk-python"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -1,57 +0,0 @@
|
||||
---
|
||||
title: "Python SDK"
|
||||
type: docs
|
||||
weight: 7
|
||||
description: >
|
||||
Python SDKs to connect to the MCP Toolbox server.
|
||||
---
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
The MCP Toolbox service provides a centralized way to manage and expose tools
|
||||
(like API connectors, database query tools, etc.) for use by GenAI applications.
|
||||
|
||||
These Python SDKs act as clients for that service. They handle the communication needed to:
|
||||
|
||||
* Fetch tool definitions from your running Toolbox instance.
|
||||
* Provide convenient Python objects or functions representing those tools.
|
||||
* Invoke the tools (calling the underlying APIs/services configured in Toolbox).
|
||||
* Handle authentication and parameter binding as needed.
|
||||
|
||||
By using these SDKs, you can easily leverage your Toolbox-managed tools directly
|
||||
within your Python applications or AI orchestration frameworks.
|
||||
|
||||
## Which Package Should I Use?
|
||||
|
||||
Choosing the right package depends on how you are building your application:
|
||||
|
||||
* [`toolbox-langchain`](langchain):
|
||||
Use this package if you are building your application using the LangChain or
|
||||
LangGraph frameworks. It provides tools that are directly compatible with the
|
||||
LangChain ecosystem (`BaseTool` interface), simplifying integration.
|
||||
* [`toolbox-llamaindex`](llamaindex):
|
||||
Use this package if you are building your application using the LlamaIndex framework.
|
||||
It provides tools that are directly compatible with the
|
||||
LlamaIndex ecosystem (`BaseTool` interface), simplifying integration.
|
||||
* [`toolbox-core`](core):
|
||||
Use this package if you are not using LangChain/LangGraph or any other
|
||||
orchestration framework, or if you need a framework-agnostic way to interact
|
||||
with Toolbox tools (e.g., for custom orchestration logic or direct use in
|
||||
Python scripts).
|
||||
|
||||
## Available Packages
|
||||
|
||||
This repository hosts the following Python packages. See the package-specific
|
||||
README for detailed installation and usage instructions:
|
||||
|
||||
| Package | Target Use Case | Integration | Path | Details (README) | PyPI Status |
|
||||
| :------ | :---------- | :---------- | :---------------------- | :---------- | :---------
|
||||
| `toolbox-core` | Framework-agnostic / Custom applications | Use directly / Custom | `packages/toolbox-core/` | 📄 [View README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md) |  |
|
||||
| `toolbox-langchain` | LangChain / LangGraph applications | LangChain / LangGraph | `packages/toolbox-langchain/` | 📄 [View README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md) |  |
|
||||
| `toolbox-llamaindex` | LlamaIndex applications | LlamaIndex | `packages/toolbox-llamaindex/` | 📄 [View README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-llamaindex/README.md) |  |
|
||||
|
||||
|
||||
{{< notice note >}}
|
||||
Source code for [python-sdk](https://github.com/googleapis/mcp-toolbox-sdk-python)
|
||||
{{< /notice >}}
|
||||

@@ -1,499 +0,0 @@
---
title: "core"
type: docs
weight: 8
description: >
  Toolbox-core SDK for connecting to the MCP Toolbox server and invoking tools programmatically.
---

## Overview

The `toolbox-core` package provides a Python interface to the MCP Toolbox service, enabling you to load and invoke tools from your own applications.

## Installation

```bash
pip install toolbox-core
```

{{< notice note >}}
* The primary `ToolboxClient` is asynchronous and requires using `await` for loading and invoking tools, as shown in most examples.
* Asynchronous code needs to run within an event loop (e.g., using `asyncio.run()` or in an async framework). See the [Python `asyncio` documentation](https://docs.python.org/3/library/asyncio-task.html) for more details.
* If you prefer synchronous execution, refer to the [Synchronous Usage](#synchronous-usage) section below.
{{< /notice >}}

{{< notice note>}}
The `ToolboxClient` (and its synchronous counterpart `ToolboxSyncClient`) interacts with network resources using an underlying HTTP client session. You should remember to use a context manager or explicitly call `close()` to clean up these resources. If you provide your own session, you'll need to close it in addition to calling `ToolboxClient.close()`.
{{< /notice >}}

## Quickstart

1. **Start the Toolbox Service**
   - Make sure the MCP Toolbox service is running. See the [Toolbox Getting Started Guide](/getting-started/introduction/#getting-started).

2. **Minimal Example**

    ```python
    import asyncio
    from toolbox_core import ToolboxClient

    async def main():
        # Replace with the actual URL where your Toolbox service is running
        async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
            weather_tool = await toolbox.load_tool("get_weather")
            result = await weather_tool(location="London")
            print(result)

    if __name__ == "__main__":
        asyncio.run(main())
    ```

{{< notice tip>}}
For a complete, end-to-end example including setting up the service and using an SDK, see the full tutorial: [**Toolbox Quickstart Tutorial**](https://googleapis.github.io/genai-toolbox/getting-started/local_quickstart)
{{< /notice >}}

{{< notice note>}}
If you initialize `ToolboxClient` without providing an external session and cannot use `async with`, you must explicitly close the client using `await toolbox.close()` in a `finally` block. This ensures the internally created session is closed.

```py
toolbox = ToolboxClient("http://127.0.0.1:5000")
try:
    ...  # use toolbox
finally:
    await toolbox.close()
```
{{< /notice >}}

## Usage

Import and initialize a Toolbox client, pointing it to the URL of your running
Toolbox service.

```py
from toolbox_core import ToolboxClient

# Replace with your Toolbox service's URL
async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
```

All interactions for loading and invoking tools happen through this client.

{{< notice note>}}
For advanced use cases, you can provide an external `aiohttp.ClientSession` during initialization (e.g., `ToolboxClient(url, session=my_session)`). If you provide your own session, you are responsible for managing its lifecycle; `ToolboxClient` *will not* close it.
{{< /notice >}}

{{< notice note>}}
Closing the `ToolboxClient` also closes the underlying network session shared by all tools loaded from that client. As a result, any tool instances you have loaded will cease to function and will raise an error if you attempt to invoke them after the client is closed.
{{< /notice >}}

## Loading Tools

You can load tools individually or in groups (toolsets) as defined in your
Toolbox service configuration. Loading a toolset is convenient when working with
multiple related functions, while loading a single tool offers more granular
control.

### Load a toolset

A toolset is a collection of related tools. You can load all tools in a toolset
or a specific one:

```py
# Load all tools
tools = await toolbox.load_toolset()

# Load a specific toolset
tools = await toolbox.load_toolset("my-toolset")
```

### Load a single tool

Loads a specific tool by its unique name. This provides fine-grained control.

```py
tool = await toolbox.load_tool("my-tool")
```

## Invoking Tools

Once loaded, tools behave like awaitable Python functions. You invoke them using
`await` and pass arguments corresponding to the parameters defined in the tool's
configuration within the Toolbox service.

```py
tool = await toolbox.load_tool("my-tool")
result = await tool("foo", bar="baz")
```

{{< notice tip>}}
For a more comprehensive guide on setting up the Toolbox service itself, which you'll need running to use this SDK, please refer to the [Toolbox Quickstart Guide](getting-started/local_quickstart).
{{< /notice >}}

## Synchronous Usage

By default, the `ToolboxClient` and the `ToolboxTool` objects it produces behave like asynchronous Python functions, requiring the use of `await`.

If your application primarily uses synchronous code, or you prefer not to manage an asyncio event loop, you can use the synchronous alternatives provided:

* `ToolboxSyncClient`: The synchronous counterpart to `ToolboxClient`.
* `ToolboxSyncTool`: The synchronous counterpart to `ToolboxTool`.

The `ToolboxSyncClient` handles communication with the Toolbox service synchronously and produces `ToolboxSyncTool` instances when you load tools. You do not use the `await` keyword when interacting with these synchronous versions.

```py
from toolbox_core import ToolboxSyncClient

with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox:
    weather_tool = toolbox.load_tool("get_weather")
    result = weather_tool(location="Paris")
    print(result)
```

{{< notice tip>}}
While synchronous invocation is available for convenience, it's generally considered best practice to use asynchronous operations (like those provided by the default `ToolboxClient` and `ToolboxTool`) for an I/O-bound task like tool invocation. Asynchronous programming allows for cooperative multitasking, often leading to better performance and resource utilization, especially in applications handling concurrent requests.
{{< /notice >}}

## Use with LangGraph

The Toolbox Core SDK integrates smoothly with frameworks like LangGraph,
allowing you to incorporate tools managed by the Toolbox service into your
agentic workflows.

{{< notice tip>}}
The loaded tools (both async `ToolboxTool` and sync `ToolboxSyncTool`) are callable and can often be used directly. However, to ensure parameter descriptions from Google-style docstrings are accurately parsed and made available to the LLM (via `bind_tools()`) and LangGraph internals, it's recommended to wrap the loaded tools using LangChain's [`StructuredTool`](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.structured.StructuredTool.html).
{{< /notice >}}

Here's a conceptual example adapting the [official LangGraph tool calling
guide](https://langchain-ai.github.io/langgraph/how-tos/tool-calling):

```py
import asyncio
from typing import Annotated
from typing_extensions import TypedDict
from langchain_core.messages import HumanMessage, BaseMessage
from toolbox_core import ToolboxClient
from langchain_google_vertexai import ChatVertexAI
from langgraph.graph import StateGraph, START, END
from langgraph.prebuilt import ToolNode
from langchain.tools import StructuredTool
from langgraph.graph.message import add_messages

class State(TypedDict):
    messages: Annotated[list[BaseMessage], add_messages]

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tools = await toolbox.load_toolset()
        wrapped_tools = [StructuredTool.from_function(tool, parse_docstring=True) for tool in tools]
        model_with_tools = ChatVertexAI(model="gemini-2.5-flash").bind_tools(wrapped_tools)
        tool_node = ToolNode(wrapped_tools)

        def call_agent(state: State):
            response = model_with_tools.invoke(state["messages"])
            return {"messages": [response]}

        def should_continue(state: State):
            last_message = state["messages"][-1]
            if last_message.tool_calls:
                return "tools"
            return END

        graph_builder = StateGraph(State)

        graph_builder.add_node("agent", call_agent)
        graph_builder.add_node("tools", tool_node)

        graph_builder.add_edge(START, "agent")
        graph_builder.add_conditional_edges(
            "agent",
            should_continue,
        )
        graph_builder.add_edge("tools", "agent")

        app = graph_builder.compile()

        prompt = "What is the weather in London?"
        inputs = {"messages": [HumanMessage(content=prompt)]}

        print(f"User: {prompt}\n")
        print("--- Streaming Agent Steps ---")

        events = app.stream(
            inputs,
            stream_mode="values",
        )

        for event in events:
            event["messages"][-1].pretty_print()
            print("\n---\n")

asyncio.run(main())
```

## Client to Server Authentication

This section describes how to authenticate the ToolboxClient itself when
connecting to a Toolbox server instance that requires authentication. This is
crucial for securing your Toolbox server endpoint, especially when deployed on
platforms like Cloud Run, GKE, or any environment where unauthenticated access is restricted.

This client-to-server authentication ensures that the Toolbox server can verify
the identity of the client making the request before any tool is loaded or
called. It is different from [Authenticating Tools](#authenticating-tools),
which deals with providing credentials for specific tools within an already
connected Toolbox session.

### When is Client-to-Server Authentication Needed?

You'll need this type of authentication if your Toolbox server is configured to
deny unauthenticated requests. For example:

- Your Toolbox server is deployed on Cloud Run and configured to "Require authentication."
- Your server is behind an Identity-Aware Proxy (IAP) or a similar
  authentication layer.
- You have custom authentication middleware on your self-hosted Toolbox server.

Without proper client authentication in these scenarios, attempts to connect or
make calls (like `load_tool`) will likely fail with `Unauthorized` errors.

### How it works

The ToolboxClient (and ToolboxSyncClient) allows you to specify functions
(or coroutines for the async client) that dynamically generate HTTP headers for
every request sent to the Toolbox server. The most common use case is to add an
Authorization header with a bearer token (e.g., a Google ID token).

These header-generating functions are called just before each request, ensuring
that fresh credentials or header values can be used.

### Configuration

You can configure these dynamic headers as seen below:

```python
from toolbox_core import ToolboxClient

async with ToolboxClient("toolbox-url", client_headers={"header1": header1_getter, "header2": header2_getter, ...}) as client:
    # Use client
    pass
```

### Authenticating with Google Cloud Servers

For Toolbox servers hosted on Google Cloud (e.g., Cloud Run) and requiring
`Google ID token` authentication, the helper module
[auth_methods](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/src/toolbox_core/auth_methods.py) provides utility functions.

### Step by Step Guide for Cloud Run

1. **Configure Permissions**: [Grant](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals) the `roles/run.invoker` IAM role on the Cloud
   Run service to the principal. This could be your `user account email` or a
   `service account`.
2. **Configure Credentials**
   - Local Development: Set up
     [ADC](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment).
   - Google Cloud Environments: When running within Google Cloud (e.g., Compute
     Engine, GKE, another Cloud Run service, Cloud Functions), ADC is typically
     configured automatically, using the environment's default service account.
3. **Connect to the Toolbox Server**

    ```python
    from toolbox_core import auth_methods

    auth_token_provider = auth_methods.aget_google_id_token(URL)  # can also use sync method
    async with ToolboxClient(
        URL,
        client_headers={"Authorization": auth_token_provider},
    ) as client:
        tools = await client.load_toolset()

        # Now, you can use the client as usual.
    ```
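
The comment in the snippet above notes that a synchronous helper can be used as well. As a rough sketch, assuming `get_google_id_token` is the synchronous counterpart provided by `auth_methods`, the same setup with the synchronous client might look like this:

```python
from toolbox_core import ToolboxSyncClient, auth_methods

# Assumption: get_google_id_token is the sync counterpart of aget_google_id_token.
auth_token_provider = auth_methods.get_google_id_token(URL)

with ToolboxSyncClient(
    URL,
    client_headers={"Authorization": auth_token_provider},
) as client:
    tools = client.load_toolset()
```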

## Authenticating Tools

{{< notice info >}}
**Always use HTTPS** to connect your application with the Toolbox service, especially in **production environments** or whenever the communication involves **sensitive data** (including scenarios where tools require authentication tokens). Using plain HTTP lacks encryption and exposes your application and data to significant security risks, such as eavesdropping and tampering.
{{</notice>}}

Tools can be configured within the Toolbox service to require authentication,
ensuring only authorized users or applications can invoke them, especially when
accessing sensitive data.

### When is Authentication Needed?

Authentication is configured per-tool within the Toolbox service itself. If a
tool you intend to use is marked as requiring authentication in the service, you
must configure the SDK client to provide the necessary credentials (currently
OAuth2 tokens) when invoking that specific tool.

### Supported Authentication Mechanisms

The Toolbox service enables secure tool usage through **Authenticated Parameters**. For detailed information on how these mechanisms work within the Toolbox service and how to configure them, please refer to [Authenticated Parameters](resources/tools/#authenticated-parameters).

### Step 1: Configure Tools in Toolbox Service

First, ensure the target tool(s) are configured correctly in the Toolbox service
to require authentication. Refer to the [Authenticated Parameters](https://googleapis.github.io/genai-toolbox/resources/tools/#authenticated-parameters)
for instructions.

### Step 2: Configure SDK Client

Your application needs a way to obtain the required OAuth2 token for the
authenticated user. The SDK requires you to provide a function capable of
retrieving this token *when the tool is invoked*.

#### Provide an ID Token Retriever Function

You must provide the SDK with a function (sync or async) that returns the
necessary token when called. The implementation depends on your application's
authentication flow (e.g., retrieving a stored token, initiating an OAuth flow).

{{< notice info>}}
The name used when registering the getter function with the SDK (e.g., `"my_api_token"`) must exactly match the `name` of the corresponding `authServices` defined in the tool's configuration within the Toolbox service.
{{</notice>}}

```py
async def get_auth_token():
    # ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    # This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"  # Placeholder
```

{{< notice tip>}}
Your token retriever function is invoked every time an authenticated parameter requires a token for a tool call. Consider implementing caching logic within this function to avoid redundant token fetching or generation, especially for tokens with longer validity periods or if the retrieval process is resource-intensive.
{{</notice>}}
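
For illustration, here is one minimal way such caching might look. This is only a sketch: `fetch_new_id_token()` and the one-hour lifetime are hypothetical stand-ins for your own auth flow.

```py
import time

_cached_token: str | None = None
_expires_at: float = 0.0

async def get_auth_token():
    global _cached_token, _expires_at
    # Reuse the cached token until shortly before it expires.
    if _cached_token is None or time.time() > _expires_at - 60:
        _cached_token = await fetch_new_id_token()  # hypothetical helper for your auth flow
        _expires_at = time.time() + 3600  # assumes a one-hour token lifetime
    return _cached_token
```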

#### Option A: Add Authentication to a Loaded Tool

You can add the token retriever function to a tool object *after* it has been
loaded. This modifies the specific tool instance.

```py
async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
    tool = await toolbox.load_tool("my-tool")

    auth_tool = tool.add_auth_token_getter("my_auth", get_auth_token)  # Single token

    # OR

    multi_auth_tool = tool.add_auth_token_getters({
        "my_auth_1": get_auth_token_1,
        "my_auth_2": get_auth_token_2,
    })  # Multiple tokens
```

#### Option B: Add Authentication While Loading Tools

You can provide the token retriever(s) directly during the `load_tool` or
`load_toolset` calls. This applies the authentication configuration only to the
tools loaded in that specific call, without modifying the original tool objects
if they were loaded previously.

```py
auth_tool = await toolbox.load_tool("my-tool", auth_token_getters={"my_auth": get_auth_token})

# OR

auth_tools = await toolbox.load_toolset(auth_token_getters={"my_auth": get_auth_token})
```

{{< notice >}}
Adding auth tokens during loading only affects the tools loaded within that call.
{{</notice>}}

### Complete Authentication Example

```py
import asyncio
from toolbox_core import ToolboxClient

async def get_auth_token():
    # ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    # This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"  # Placeholder

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("my-tool")

        auth_tool = tool.add_auth_token_getters({"my_auth": get_auth_token})
        result = await auth_tool(input="some input")
        print(result)

asyncio.run(main())
```

{{< notice >}}
An auth token getter for a specific name (e.g., “GOOGLE_ID”) will replace any client header with the same name followed by “_token” (e.g., “GOOGLE_ID_token”).
{{</notice>}}
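
As a concrete illustration of that precedence (the names below are hypothetical), a tool-level getter registered as `"my_auth"` would supersede a statically configured `"my_auth_token"` client header:

```py
async with ToolboxClient(
    "https://toolbox.example.com",
    client_headers={"my_auth_token": static_header_getter},
) as toolbox:
    # The "my_auth" getter takes priority over the "my_auth_token"
    # client header for requests made by this tool.
    tool = await toolbox.load_tool(
        "my-tool", auth_token_getters={"my_auth": get_auth_token}
    )
```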

## Binding Parameter Values

The SDK allows you to pre-set, or "bind", values for specific tool parameters
before the tool is invoked or even passed to an LLM. These bound values are
fixed and will not be requested or modified by the LLM during tool use.

### Why Bind Parameters?

- **Protecting sensitive information:** API keys, secrets, etc.
- **Enforcing consistency:** Ensuring specific values for certain parameters.
- **Pre-filling known data:** Providing defaults or context.

{{< notice info >}}
The parameter names used for binding (e.g., "api_key") must exactly match the parameter names defined in the tool’s configuration within the Toolbox service.
{{</notice>}}

{{< notice >}}
You do not need to modify the tool’s configuration in the Toolbox service to bind parameter values using the SDK.
{{</notice>}}

### Option A: Binding Parameters to a Loaded Tool

Bind values to a tool object *after* it has been loaded. This modifies the
specific tool instance.

```py
async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
    tool = await toolbox.load_tool("my-tool")

    bound_tool = tool.bind_param("param", "value")

    # OR

    bound_tool = tool.bind_params({"param": "value"})
```

### Option B: Binding Parameters While Loading Tools

Specify bound parameters directly when loading tools. This applies the binding
only to the tools loaded in that specific call.

```py
bound_tool = await toolbox.load_tool("my-tool", bound_params={"param": "value"})

# OR

bound_tools = await toolbox.load_toolset(bound_params={"param": "value"})
```

{{< notice >}}
Bound values during loading only affect the tools loaded in that call.
{{</notice>}}

### Binding Dynamic Values

Instead of a static value, you can bind a parameter to a synchronous or
asynchronous function. This function will be called *each time* the tool is
invoked to dynamically determine the parameter's value at runtime.

{{< notice >}}
You don’t need to modify tool configurations to bind parameter values.
{{</notice>}}

```py
async def get_dynamic_value():
    # Logic to determine the value
    return "dynamic_value"

dynamic_bound_tool = tool.bind_param("param", get_dynamic_value)
```

@@ -1,6 +1,6 @@
 {
   "name": "mcp-toolbox-for-databases",
-  "version": "0.24.0",
+  "version": "0.25.0",
   "description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
   "contextFileName": "MCP-TOOLBOX-EXTENSION.md"
 }
go.mod
@@ -37,12 +37,14 @@ require (
|
||||
github.com/google/go-cmp v0.7.0
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/jackc/pgx/v5 v5.7.6
|
||||
github.com/jmoiron/sqlx v1.4.0
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21
|
||||
github.com/microsoft/go-mssqldb v1.9.3
|
||||
github.com/nakagami/firebirdsql v0.9.15
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
||||
github.com/redis/go-redis/v9 v9.17.2
|
||||
github.com/sijms/go-ora/v2 v2.9.0
|
||||
github.com/snowflakedb/gosnowflake v1.18.1
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/thlib/go-timezone-local v0.0.7
|
||||
github.com/trinodb/trino-go-client v0.330.0
|
||||
@@ -88,13 +90,40 @@ require (
|
||||
cloud.google.com/go/monitoring v1.24.3 // indirect
|
||||
cloud.google.com/go/trace v1.11.7 // indirect
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
|
||||
github.com/99designs/keyring v1.2.2 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect
|
||||
github.com/BurntSushi/toml v1.4.0 // indirect
|
||||
github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.10.3 // indirect
|
||||
github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
|
||||
github.com/ajg/form v1.5.1 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.4.0 // indirect
|
||||
github.com/apache/arrow/go/v15 v15.0.2 // indirect
|
||||
github.com/apache/thrift v0.22.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/config v1.31.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.12 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.88.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.29.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect
|
||||
github.com/aws/smithy-go v1.23.0 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
|
||||
github.com/couchbase/gocbcore/v10 v10.8.1 // indirect
|
||||
@@ -102,8 +131,10 @@ require (
|
||||
github.com/couchbase/goprotostellar v1.0.2 // indirect
|
||||
github.com/couchbase/tools-common/errors v1.0.0 // indirect
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect
|
||||
github.com/danieljoos/wincred v1.2.2 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 // indirect
|
||||
github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
@@ -115,7 +146,9 @@ require (
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
|
||||
github.com/godror/knownpb v0.3.0 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
@@ -127,6 +160,7 @@ require (
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
|
||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
@@ -140,19 +174,25 @@ require (
|
||||
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect
|
||||
github.com/klauspost/asmfmt v1.3.2 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/mtibben/percent v0.2.1 // indirect
|
||||
github.com/nakagami/chacha20 v0.1.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
@@ -182,6 +222,7 @@ require (
|
||||
golang.org/x/sync v0.18.0 // indirect
|
||||
golang.org/x/sys v0.38.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect
|
||||
golang.org/x/term v0.37.0 // indirect
|
||||
golang.org/x/text v0.31.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/tools v0.38.0 // indirect
|
||||
|
||||
go.sum
@@ -643,6 +643,10 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
|
||||
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
|
||||
github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
|
||||
github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
@@ -653,11 +657,15 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZb
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 h1:u/LLAOFgsMv7HmNL4Qufg58y+qElGOt5qv0z1mURkRY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0/go.mod h1:2e8rMJtl2+2j+HXbTBwnyGpm5Nou7KhvSfxOq8JpTag=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
|
||||
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/ClickHouse/ch-go v0.68.0 h1:zd2VD8l2aVYnXFRyhTyKCrxvhSz1AaY4wBUXu/f0GiU=
|
||||
github.com/ClickHouse/ch-go v0.68.0/go.mod h1:C89Fsm7oyck9hr6rRo5gqqiVtaIY6AjdD0WFMyNRQ5s=
|
||||
@@ -701,6 +709,8 @@ github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUS
|
||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0=
|
||||
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14=
|
||||
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
|
||||
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
|
||||
github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE=
|
||||
@@ -708,6 +718,8 @@ github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+ye
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0 h1:Omnzb1Z/P90Dr2TbVNu54ICQL7TKVIIsJO231w484HU=
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0/go.mod h1:QH/asJjB3mHvY6Dot6ZKMMpTcOrWJ8i9GhsvG1g0PK4=
|
||||
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ=
|
||||
github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk=
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.0 h1:xm5WV/2L4emMRmMjHFykqiA4M/ra0DJVSWUkDyBjbg4=
|
||||
@@ -720,6 +732,8 @@ github.com/aws/aws-sdk-go-v2/credentials v1.18.12 h1:zmc9e1q90wMn8wQbjryy8IwA6Q4
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.12/go.mod h1:3VzdRDR5u3sSJRI4kYcOSIBbeYsgtVk7dG5R/U6qLWY=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 h1:Is2tPmieqGS2edBnmOJIbdvOA6Op+rRpaYR60iBAwXM=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7/go.mod h1:F1i5V5421EGci570yABvpIXgRIBPb5JM+lSkHF6Dq5w=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 h1:7Zwtt/lP3KNRkeZre7soMELMGNoBrutx8nobg1jKWmo=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15/go.mod h1:436h2adoHb57yd+8W+gYPrrA9U/R/SuAuOO42Ushzhw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 h1:UCxq0X9O3xrlENdKf1r9eRJoKz/b0AfGkpp3a7FPlhg=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7/go.mod h1:rHRoJUNUASj5Z/0eqI4w32vKvC7atoWR0jC+IkmVH8k=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 h1:Y6DTZUn7ZUC4th9FMBbo8LVE+1fyq3ofw+tRwkUd3PY=
|
||||
@@ -804,6 +818,8 @@ github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
|
||||
github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
@@ -822,6 +838,8 @@ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs=
|
||||
@@ -905,6 +923,7 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
|
||||
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
@@ -915,6 +934,8 @@ github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
|
||||
github.com/godror/godror v0.49.6 h1:ts4ZGw8uLJ42e1D7aXmVuSrld0/lzUzmIUjuUuQOgGM=
|
||||
github.com/godror/godror v0.49.6/go.mod h1:kTMcxZzRw73RT5kn9v3JkBK4kHI6dqowHotqV72ebU8=
|
||||
github.com/godror/knownpb v0.3.0 h1:+caUdy8hTtl7X05aPl3tdL540TvCcaQA6woZQroLZMw=
|
||||
@@ -1066,6 +1087,8 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4Zs
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0=
|
||||
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
|
||||
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
@@ -1110,6 +1133,8 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6
|
||||
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
|
||||
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
||||
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
@@ -1121,6 +1146,7 @@ github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALr
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
|
||||
@@ -1144,6 +1170,8 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
|
||||
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
@@ -1156,9 +1184,13 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/microsoft/go-mssqldb v1.9.3 h1:hy4p+LDC8LIGvI3JATnLVmBOLMJbmn5X400mr5j0lPs=
|
||||
github.com/microsoft/go-mssqldb v1.9.3/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
@@ -1174,6 +1206,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
|
||||
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
|
||||
github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo=
|
||||
github.com/nakagami/chacha20 v0.1.0/go.mod h1:xpoujepNFA7MvYLvX5xKHzlOHimDrLI9Ll8zfOJ0l2E=
|
||||
github.com/nakagami/firebirdsql v0.9.15 h1:Mf05jaFI8+kjy6sBstsAu76zOkJ44AGd6cpApWNrp/0=
|
||||
@@ -1182,6 +1216,7 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/ulid/v2 v2.0.2 h1:r4fFzBm+bv0wNKNh5eXTwU7i85y5x+uwkxCUTNVQqLc=
|
||||
github.com/oklog/ulid/v2 v2.0.2/go.mod h1:mtBL0Qe/0HAx6/a4Z30qxVIAL1eQDweXq5lxOEiwQ68=
|
||||
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
|
||||
@@ -1247,6 +1282,8 @@ github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYo
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/snowflakedb/gosnowflake v1.18.1 h1:Nb4AWSnSBWe1UKpKTwCZxjYhYo1JH7GgKhO3wW1kR10=
|
||||
github.com/snowflakedb/gosnowflake v1.18.1/go.mod h1:7D4+cLepOWrerVsH+tevW3zdMJ5/WrEN7ZceAC6xBv0=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
|
||||
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
|
||||
@@ -1650,10 +1687,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@@ -2075,6 +2114,7 @@ google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aO
|
||||
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
|
||||
@@ -21,13 +21,13 @@ import (
|
||||
|
||||
// AuthServiceConfig is the interface for configuring authentication services.
|
||||
type AuthServiceConfig interface {
|
||||
AuthServiceConfigKind() string
|
||||
AuthServiceConfigType() string
|
||||
Initialize() (AuthService, error)
|
||||
}
|
||||
|
||||
// AuthService is the interface for authentication services.
|
||||
type AuthService interface {
|
||||
AuthServiceKind() string
|
||||
AuthServiceType() string
|
||||
GetName() string
|
||||
GetClaimsFromHeader(context.Context, http.Header) (map[string]any, error)
|
||||
ToConfig() AuthServiceConfig
|
||||
|
||||
@@ -23,7 +23,7 @@ import (
|
||||
"google.golang.org/api/idtoken"
|
||||
)
|
||||
|
||||
const AuthServiceKind string = "google"
|
||||
const AuthServiceType string = "google"
|
||||
|
||||
// validate interface
|
||||
var _ auth.AuthServiceConfig = Config{}
|
||||
@@ -31,13 +31,13 @@ var _ auth.AuthServiceConfig = Config{}
|
||||
// Auth service configuration
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Type string `yaml:"kind" validate:"required"`
|
||||
ClientID string `yaml:"clientId" validate:"required"`
|
||||
}
|
||||
|
||||
// Returns the auth service kind
|
||||
func (cfg Config) AuthServiceConfigKind() string {
|
||||
return AuthServiceKind
|
||||
// Returns the auth service type
|
||||
func (cfg Config) AuthServiceConfigType() string {
|
||||
return AuthServiceType
|
||||
}
|
||||
|
||||
// Initialize a Google auth service
|
||||
@@ -55,9 +55,9 @@ type AuthService struct {
|
||||
Config
|
||||
}
|
||||
|
||||
// Returns the auth service kind
|
||||
func (a AuthService) AuthServiceKind() string {
|
||||
return AuthServiceKind
|
||||
// Returns the auth service type
|
||||
func (a AuthService) AuthServiceType() string {
|
||||
return AuthServiceType
|
||||
}
|
||||
|
||||
func (a AuthService) ToConfig() auth.AuthServiceConfig {
|
||||
|
||||
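
For reference, the `google` auth service configured by the struct above is declared in the server's tools file with a map key as its name plus the `kind` and `clientId` fields; a minimal sketch (the client ID is a placeholder):

```yaml
authServices:
  my-google-auth:
    kind: google
    clientId: YOUR_CLIENT_ID.apps.googleusercontent.com
```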
@@ -22,12 +22,12 @@ import (
|
||||
|
||||
// EmbeddingModelConfig is the interface for configuring embedding models.
|
||||
type EmbeddingModelConfig interface {
|
||||
EmbeddingModelConfigKind() string
|
||||
EmbeddingModelConfigType() string
|
||||
Initialize(context.Context) (EmbeddingModel, error)
|
||||
}
|
||||
|
||||
type EmbeddingModel interface {
|
||||
EmbeddingModelKind() string
|
||||
EmbeddingModelType() string
|
||||
ToConfig() EmbeddingModelConfig
|
||||
EmbedParameters(context.Context, []string) ([][]float32, error)
|
||||
}
|
||||
|
||||
@@ -23,22 +23,22 @@ import (
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
const EmbeddingModelKind string = "gemini"
|
||||
const EmbeddingModelType string = "gemini"
|
||||
|
||||
// validate interface
|
||||
var _ embeddingmodels.EmbeddingModelConfig = Config{}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Type string `yaml:"kind" validate:"required"`
|
||||
Model string `yaml:"model" validate:"required"`
|
||||
ApiKey string `yaml:"apiKey"`
|
||||
Dimension int32 `yaml:"dimension"`
|
||||
}
|
||||
|
||||
// Returns the embedding model kind
|
||||
func (cfg Config) EmbeddingModelConfigKind() string {
|
||||
return EmbeddingModelKind
|
||||
// Returns the embedding model type
|
||||
func (cfg Config) EmbeddingModelConfigType() string {
|
||||
return EmbeddingModelType
|
||||
}
|
||||
|
||||
// Initialize a Gemini embedding model
|
||||
@@ -69,9 +69,9 @@ type EmbeddingModel struct {
|
||||
Config
|
||||
}
|
||||
|
||||
// Returns the embedding model kind
|
||||
func (m EmbeddingModel) EmbeddingModelKind() string {
|
||||
return EmbeddingModelKind
|
||||
// Returns the embedding model type
|
||||
func (m EmbeddingModel) EmbeddingModelType() string {
|
||||
return EmbeddingModelType
|
||||
}
|
||||
|
||||
func (m EmbeddingModel) ToConfig() embeddingmodels.EmbeddingModelConfig {
|
||||
|
||||
@@ -42,7 +42,7 @@ func TestParseFromYamlGemini(t *testing.T) {
|
||||
want: map[string]embeddingmodels.EmbeddingModelConfig{
|
||||
"my-gemini-model": gemini.Config{
|
||||
Name: "my-gemini-model",
|
||||
Kind: gemini.EmbeddingModelKind,
|
||||
Type: gemini.EmbeddingModelType,
|
||||
Model: "text-embedding-004",
|
||||
},
|
||||
},
|
||||
@@ -60,7 +60,7 @@ func TestParseFromYamlGemini(t *testing.T) {
|
||||
want: map[string]embeddingmodels.EmbeddingModelConfig{
|
||||
"complex-gemini": gemini.Config{
|
||||
Name: "complex-gemini",
|
||||
Kind: gemini.EmbeddingModelKind,
|
||||
Type: gemini.EmbeddingModelType,
|
||||
Model: "text-embedding-004",
|
||||
ApiKey: "test-api-key",
|
||||
Dimension: 768,
|
||||
|
||||
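
For context, the test expectations above correspond to a server config entry along these lines. This is only a sketch: the top-level `embeddingModels` key is an assumption based on the new embedding-model support, and the env-var expansion for `apiKey` is assumed to work as it does for other config values.

```yaml
embeddingModels:
  my-gemini-model:
    kind: gemini
    model: text-embedding-004
    apiKey: ${GEMINI_API_KEY}  # optional
    dimension: 768             # optional
```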
@@ -49,6 +49,7 @@ var expectedToolSources = []string{
|
||||
"postgres",
|
||||
"serverless-spark",
|
||||
"singlestore",
|
||||
"snowflake",
|
||||
"spanner-postgres",
|
||||
"spanner",
|
||||
"sqlite",
|
||||
@@ -127,9 +128,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
sqlite_config, _ := Get("sqlite")
|
||||
neo4jconfig, _ := Get("neo4j")
|
||||
healthcare_config, _ := Get("cloud-healthcare")
|
||||
|
||||
snowflake_config, _ := Get("snowflake")
|
||||
if len(alloydb_admin_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
|
||||
}
|
||||
if len(alloydb_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
@@ -197,6 +198,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(singlestore_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch singlestore prebuilt tools yaml")
|
||||
}
|
||||
if len(snowflake_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
|
||||
}
|
||||
if len(spanner_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch spanner prebuilt tools yaml")
|
||||
}
|
||||
@@ -218,6 +222,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(healthcare_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch healthcare prebuilt tools yaml")
|
||||
}
|
||||
if len(snowflake_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailGetPrebuiltTool(t *testing.T) {
|
||||
|
||||
@@ -19,6 +19,7 @@ sources:
|
||||
location: ${BIGQUERY_LOCATION:}
|
||||
useClientOAuth: ${BIGQUERY_USE_CLIENT_OAUTH:false}
|
||||
scopes: ${BIGQUERY_SCOPES:}
|
||||
maxQueryResultRows: ${BIGQUERY_MAX_QUERY_RESULT_ROWS:50}
|
||||
|
||||
tools:
|
||||
analyze_contribution:
|
||||
|
||||
@@ -43,6 +43,9 @@ tools:
|
||||
clone_instance:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mssql_admin_tools:
|
||||
@@ -54,3 +57,4 @@ toolsets:
|
||||
- create_user
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
- create_backup
|
||||
|
||||
@@ -43,6 +43,9 @@ tools:
|
||||
clone_instance:
|
||||
kind: cloud-sql-clone-instance
|
||||
source: cloud-sql-admin-source
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mysql_admin_tools:
|
||||
@@ -54,3 +57,4 @@ toolsets:
|
||||
- create_user
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
- create_backup
|
||||
|
||||
@@ -46,6 +46,9 @@ tools:
|
||||
postgres_upgrade_precheck:
|
||||
kind: postgres-upgrade-precheck
|
||||
source: cloud-sql-admin-source
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_postgres_admin_tools:
|
||||
@@ -58,3 +61,4 @@ toolsets:
|
||||
- wait_for_operation
|
||||
- postgres_upgrade_precheck
|
||||
- clone_instance
|
||||
- create_backup
|
||||
|
||||
internal/prebuiltconfigs/tools/snowflake.yaml (new file)
@@ -0,0 +1,142 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
sources:
|
||||
snowflake-source:
|
||||
kind: snowflake
|
||||
account: ${SNOWFLAKE_ACCOUNT}
|
||||
user: ${SNOWFLAKE_USER}
|
||||
password: ${SNOWFLAKE_PASSWORD}
|
||||
database: ${SNOWFLAKE_DATABASE}
|
||||
schema: ${SNOWFLAKE_SCHEMA}
|
||||
warehouse: ${SNOWFLAKE_WAREHOUSE}
|
||||
role: ${SNOWFLAKE_ROLE}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: snowflake-execute-sql
|
||||
source: snowflake-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: snowflake-sql
|
||||
source: snowflake-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, owner, comment) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in the specified database and schema."
|
||||
statement: |
|
||||
WITH
|
||||
input_param AS (
|
||||
SELECT ? AS param -- Single bind variable here
|
||||
)
|
||||
,
|
||||
all_tables_mode AS (
|
||||
SELECT COALESCE(TRIM(param), '') = '' AS is_all_tables
|
||||
FROM input_param
|
||||
) --SELECT * FROM all_tables_mode;
|
||||
,
|
||||
filtered_table_names AS (
|
||||
SELECT DISTINCT TRIM(LOWER(value)) AS table_name
|
||||
FROM input_param, all_tables_mode, TABLE(SPLIT_TO_TABLE(param, ','))
|
||||
WHERE NOT is_all_tables
|
||||
) -- SELECT * FROM filtered_table_names;
|
||||
,
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.TABLE_CATALOG,
|
||||
t.TABLE_SCHEMA,
|
||||
t.TABLE_NAME,
|
||||
t.TABLE_TYPE,
|
||||
t.TABLE_OWNER,
|
||||
t.COMMENT
|
||||
FROM
|
||||
all_tables_mode
|
||||
CROSS JOIN ${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLES T
|
||||
WHERE
|
||||
t.TABLE_TYPE = 'BASE TABLE'
|
||||
AND t.TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA')
|
||||
AND t.TABLE_SCHEMA = '${SNOWFLAKE_SCHEMA}'
|
||||
AND is_all_tables OR LOWER(T.TABLE_NAME) IN (SELECT table_name FROM filtered_table_names)
|
||||
) -- SELECT * FROM table_info;
|
||||
,
|
||||
columns_info AS (
|
||||
SELECT
|
||||
c.TABLE_CATALOG AS database_name,
|
||||
c.TABLE_SCHEMA AS schema_name,
|
||||
c.TABLE_NAME AS table_name,
|
||||
c.COLUMN_NAME AS column_name,
|
||||
c.DATA_TYPE AS data_type,
|
||||
c.ORDINAL_POSITION AS column_ordinal_position,
|
||||
c.IS_NULLABLE AS is_nullable,
|
||||
c.COLUMN_DEFAULT AS column_default,
|
||||
c.COMMENT AS column_comment
|
||||
FROM
|
||||
${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.COLUMNS c
|
||||
INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
|
||||
)
|
||||
,
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
tc.TABLE_CATALOG AS database_name,
|
||||
tc.TABLE_SCHEMA AS schema_name,
|
||||
tc.TABLE_NAME AS table_name,
|
||||
tc.CONSTRAINT_NAME AS constraint_name,
|
||||
tc.CONSTRAINT_TYPE AS constraint_type
|
||||
FROM
|
||||
${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
|
||||
INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
|
||||
GROUP BY
|
||||
tc.TABLE_CATALOG, tc.TABLE_SCHEMA, tc.TABLE_NAME, tc.CONSTRAINT_NAME, tc.CONSTRAINT_TYPE
|
||||
)
|
||||
SELECT
|
||||
ti.TABLE_SCHEMA AS schema_name,
|
||||
ti.TABLE_NAME AS object_name,
|
||||
OBJECT_CONSTRUCT(
|
||||
'schema_name', ti.TABLE_SCHEMA,
|
||||
'object_name', ti.TABLE_NAME,
|
||||
'object_type', ti.TABLE_TYPE,
|
||||
'owner', ti.TABLE_OWNER,
|
||||
'comment', ti.COMMENT,
|
||||
'columns', COALESCE(
|
||||
(SELECT ARRAY_AGG(
|
||||
OBJECT_CONSTRUCT(
|
||||
'column_name', ci.column_name,
|
||||
'data_type', ci.data_type,
|
||||
'ordinal_position', ci.column_ordinal_position,
|
||||
'is_nullable', ci.is_nullable,
|
||||
'column_default', ci.column_default,
|
||||
'column_comment', ci.column_comment
|
||||
)
|
||||
) FROM columns_info ci WHERE ci.table_name = ti.TABLE_NAME AND ci.schema_name = ti.TABLE_SCHEMA),
|
||||
ARRAY_CONSTRUCT()
|
||||
),
|
||||
'constraints', COALESCE(
|
||||
(SELECT ARRAY_AGG(
|
||||
OBJECT_CONSTRUCT(
|
||||
'constraint_name', cons.constraint_name,
|
||||
'constraint_type', cons.constraint_type
|
||||
)
|
||||
) FROM constraints_info cons WHERE cons.table_name = ti.TABLE_NAME AND cons.schema_name = ti.TABLE_SCHEMA),
|
||||
ARRAY_CONSTRUCT()
|
||||
)
|
||||
) AS object_details
|
||||
FROM table_info ti
|
||||
ORDER BY ti.TABLE_SCHEMA, ti.TABLE_NAME;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in the specified database and schema will be listed."
|
||||
|
||||
toolsets:
|
||||
snowflake_tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
@@ -27,10 +27,10 @@ type Message = prompts.Message

const kind = "custom"

// init registers this prompt kind with the prompt framework.
// init registers this prompt type with the prompt framework.
func init() {
if !prompts.Register(kind, newConfig) {
panic(fmt.Sprintf("prompt kind %q already registered", kind))
panic(fmt.Sprintf("prompt type %q already registered", kind))
}
}

@@ -56,7 +56,7 @@ type Config struct {
var _ prompts.PromptConfig = Config{}
var _ prompts.Prompt = Prompt{}

func (c Config) PromptConfigKind() string {
func (c Config) PromptConfigType() string {
return kind
}

@@ -50,8 +50,8 @@ func TestConfig(t *testing.T) {
if p == nil {
t.Fatal("Initialize() returned a nil prompt")
}
if cfg.PromptConfigKind() != "custom" {
t.Errorf("PromptConfigKind() = %q, want %q", cfg.PromptConfigKind(), "custom")
if cfg.PromptConfigType() != "custom" {
t.Errorf("PromptConfigType() = %q, want %q", cfg.PromptConfigType(), "custom")
}

t.Run("Manifest", func(t *testing.T) {
@@ -52,7 +52,7 @@ func DecodeConfig(ctx context.Context, kind, name string, decoder *yaml.Decoder)
}

if !found {
return nil, fmt.Errorf("unknown prompt kind: %q", kind)
return nil, fmt.Errorf("unknown prompt type: %q", kind)
}

promptConfig, err := factory(ctx, name, decoder)

@@ -63,7 +63,7 @@ func DecodeConfig(ctx context.Context, kind, name string, decoder *yaml.Decoder)
}

type PromptConfig interface {
PromptConfigKind() string
PromptConfigType() string
Initialize() (Prompt, error)
}

@@ -29,16 +29,16 @@ import (

type mockPromptConfig struct {
name string
kind string
Type string
}

func (m *mockPromptConfig) PromptConfigKind() string { return m.kind }
func (m *mockPromptConfig) PromptConfigType() string { return m.Type }
func (m *mockPromptConfig) Initialize() (prompts.Prompt, error) { return nil, nil }

var errMockFactory = errors.New("mock factory error")

func mockFactory(ctx context.Context, name string, decoder *yaml.Decoder) (prompts.PromptConfig, error) {
return &mockPromptConfig{name: name, kind: "mockKind"}, nil
return &mockPromptConfig{name: name, Type: "mockType"}, nil
}

func mockErrorFactory(ctx context.Context, name string, decoder *yaml.Decoder) (prompts.PromptConfig, error) {

@@ -50,7 +50,7 @@ func TestRegistry(t *testing.T) {
ctx := context.Background()

t.Run("RegisterAndDecodeSuccess", func(t *testing.T) {
kind := "testKindSuccess"
kind := "testTypeSuccess"
if !prompts.Register(kind, mockFactory) {
t.Fatal("expected registration to succeed")
}

@@ -69,19 +69,19 @@ func TestRegistry(t *testing.T) {
}
})

t.Run("DecodeUnknownKind", func(t *testing.T) {
t.Run("DecodeUnknownType", func(t *testing.T) {
decoder := yaml.NewDecoder(strings.NewReader(""))
_, err := prompts.DecodeConfig(ctx, "unregisteredKind", "testPrompt", decoder)
_, err := prompts.DecodeConfig(ctx, "unregisteredType", "testPrompt", decoder)
if err == nil {
t.Fatal("expected an error for unknown kind, but got nil")
}
if !strings.Contains(err.Error(), "unknown prompt kind") {
t.Errorf("expected error to contain 'unknown prompt kind', but got: %v", err)
if !strings.Contains(err.Error(), "unknown prompt type") {
t.Errorf("expected error to contain 'unknown prompt type', but got: %v", err)
}
})

t.Run("FactoryReturnsError", func(t *testing.T) {
kind := "testKindError"
kind := "testTypeError"
if !prompts.Register(kind, mockErrorFactory) {
t.Fatal("expected registration to succeed")
}

@@ -105,8 +105,8 @@ func TestRegistry(t *testing.T) {
if config == nil {
t.Fatal("expected a non-nil config for default kind")
}
if config.PromptConfigKind() != "custom" {
t.Errorf("expected default kind to be 'custom', but got %q", config.PromptConfigKind())
if config.PromptConfigType() != "custom" {
t.Errorf("expected default kind to be 'custom', but got %q", config.PromptConfigType())
}
})
}
@@ -68,6 +68,8 @@ type ServerConfig struct {
UI bool
// Specifies a list of origins permitted to access this server.
AllowedOrigins []string
// Specifies a list of hosts permitted to access this server
AllowedHosts []string
}

type logFormat string

@@ -196,7 +198,7 @@ func (c *AuthServiceConfigs) UnmarshalYAML(ctx context.Context, unmarshal func(i
return fmt.Errorf("error creating decoder: %w", err)
}
switch kind {
case google.AuthServiceKind:
case google.AuthServiceType:
actual := google.Config{Name: name}
if err := dec.DecodeContext(ctx, &actual); err != nil {
return fmt.Errorf("unable to parse as %q: %w", kind, err)

@@ -240,7 +242,7 @@ func (c *EmbeddingModelConfigs) UnmarshalYAML(ctx context.Context, unmarshal fun
return fmt.Errorf("error creating decoder: %w", err)
}
switch kind {
case gemini.EmbeddingModelKind:
case gemini.EmbeddingModelType:
actual := gemini.Config{Name: name}
if err := dec.DecodeContext(ctx, &actual); err != nil {
return fmt.Errorf("unable to parse as %q: %w", kind, err)
@@ -32,7 +32,7 @@ func TestUpdateServer(t *testing.T) {
"example-source": &alloydbpg.Source{
Config: alloydbpg.Config{
Name: "example-alloydb-source",
Kind: "alloydb-postgres",
Type: "alloydb-postgres",
},
},
}

@@ -92,7 +92,7 @@ func TestUpdateServer(t *testing.T) {
"example-source2": &alloydbpg.Source{
Config: alloydbpg.Config{
Name: "example-alloydb-source2",
Kind: "alloydb-postgres",
Type: "alloydb-postgres",
},
},
}
@@ -82,7 +82,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
childCtx, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/source/init",
trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())),
trace.WithAttributes(attribute.String("source_type", sc.SourceConfigType())),
trace.WithAttributes(attribute.String("source_name", name)),
)
defer span.End()

@@ -110,7 +110,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/auth/init",
trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())),
trace.WithAttributes(attribute.String("auth_type", sc.AuthServiceConfigType())),
trace.WithAttributes(attribute.String("auth_name", name)),
)
defer span.End()

@@ -138,7 +138,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/embeddingmodel/init",
trace.WithAttributes(attribute.String("model_kind", ec.EmbeddingModelConfigKind())),
trace.WithAttributes(attribute.String("model_type", ec.EmbeddingModelConfigType())),
trace.WithAttributes(attribute.String("model_name", name)),
)
defer span.End()

@@ -166,7 +166,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/tool/init",
trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())),
trace.WithAttributes(attribute.String("tool_type", tc.ToolConfigType())),
trace.WithAttributes(attribute.String("tool_name", name)),
)
defer span.End()

@@ -235,7 +235,7 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/prompt/init",
trace.WithAttributes(attribute.String("prompt_kind", pc.PromptConfigKind())),
trace.WithAttributes(attribute.String("prompt_type", pc.PromptConfigType())),
trace.WithAttributes(attribute.String("prompt_name", name)),
)
defer span.End()
@@ -300,6 +300,21 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
}

func hostCheck(allowedHosts map[string]struct{}) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
_, hasWildcard := allowedHosts["*"]
_, hostIsAllowed := allowedHosts[r.Host]
if !hasWildcard && !hostIsAllowed {
// Return 400 Bad Request or 403 Forbidden to block the attack
http.Error(w, "Invalid Host header", http.StatusBadRequest)
return
}
next.ServeHTTP(w, r)
})
}
}

// NewServer returns a Server object based on provided Config.
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
instrumentation, err := util.InstrumentationFromContext(ctx)

@@ -374,7 +389,7 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {

// cors
if slices.Contains(cfg.AllowedOrigins, "*") {
s.logger.WarnContext(ctx, "wildcard (`*`) allows all origin to access the resource and is not secure. Use it with cautious for public, non-sensitive data, or during local development. Recommended to use `--allowed-origins` flag to prevent DNS rebinding attacks")
s.logger.WarnContext(ctx, "wildcard (`*`) allows all origin to access the resource and is not secure. Use it with cautious for public, non-sensitive data, or during local development. Recommended to use `--allowed-origins` flag")
}
corsOpts := cors.Options{
AllowedOrigins: cfg.AllowedOrigins,

@@ -385,6 +400,15 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
MaxAge: 300, // cache preflight results for 5 minutes
}
r.Use(cors.Handler(corsOpts))
// validate hosts for DNS rebinding attacks
if slices.Contains(cfg.AllowedHosts, "*") {
s.logger.WarnContext(ctx, "wildcard (`*`) allows all hosts to access the resource and is not secure. Use it with cautious for public, non-sensitive data, or during local development. Recommended to use `--allowed-hosts` flag to prevent DNS rebinding attacks")
}
allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
for _, h := range cfg.AllowedHosts {
allowedHostsMap[h] = struct{}{}
}
r.Use(hostCheck(allowedHostsMap))

// control plane
apiR, err := apiRouter(s)
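As an aside on the hostCheck middleware added above: a request is rejected with 400 when its Host header is not in the allow-list and no "*" wildcard was configured. The following self-contained sketch is illustrative only (it re-implements the same check locally rather than importing the server package, and the allowed host value is invented), but it can be run as-is to see the behaviour via httptest:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// hostAllowList mirrors the hostCheck middleware above: unknown Host headers
// are rejected with 400 unless a "*" wildcard entry is present.
func hostAllowList(allowedHosts map[string]struct{}) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			_, hasWildcard := allowedHosts["*"]
			_, hostIsAllowed := allowedHosts[r.Host]
			if !hasWildcard && !hostIsAllowed {
				http.Error(w, "Invalid Host header", http.StatusBadRequest)
				return
			}
			next.ServeHTTP(w, r)
		})
	}
}

func main() {
	// "127.0.0.1:5000" is an assumed value, chosen only for illustration.
	allowed := map[string]struct{}{"127.0.0.1:5000": {}}
	ok := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) })
	handler := hostAllowList(allowed)(ok)

	for _, host := range []string{"127.0.0.1:5000", "rebound.example.com"} {
		req := httptest.NewRequest(http.MethodGet, "http://ignored/", nil)
		req.Host = host // the Host header a browser (or a DNS-rebinding page) would send
		rec := httptest.NewRecorder()
		handler.ServeHTTP(rec, req)
		fmt.Println(host, "->", rec.Code) // expect 200, then 400
	}
}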
@@ -43,9 +43,10 @@ func TestServe(t *testing.T) {

addr, port := "127.0.0.1", 5000
cfg := server.ServerConfig{
Version: "0.0.0",
Address: addr,
Port: port,
Version: "0.0.0",
Address: addr,
Port: port,
AllowedHosts: []string{"*"},
}

otelShutdown, err := telemetry.SetupOTel(ctx, "0.0.0", "", false, "toolbox")

@@ -140,7 +141,7 @@ func TestUpdateServer(t *testing.T) {
"example-source": &alloydbpg.Source{
Config: alloydbpg.Config{
Name: "example-alloydb-source",
Kind: "alloydb-postgres",
Type: "alloydb-postgres",
},
},
}
@@ -32,14 +32,14 @@ import (
"google.golang.org/api/option"
)

const SourceKind string = "alloydb-admin"
const SourceType string = "alloydb-admin"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -53,13 +53,13 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
DefaultProject string `yaml:"defaultProject"`
UseClientOAuth bool `yaml:"useClientOAuth"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
func (r Config) SourceConfigType() string {
return SourceType
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {

@@ -106,8 +106,8 @@ type Source struct {
Service *alloydbrestapi.Service
}

func (s *Source) SourceKind() string {
return SourceKind
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {

@@ -41,7 +41,7 @@ func TestParseFromYamlAlloyDBAdmin(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-alloydb-admin-instance": alloydbadmin.Config{
Name: "my-alloydb-admin-instance",
Kind: alloydbadmin.SourceKind,
Type: alloydbadmin.SourceType,
UseClientOAuth: false,
},
},

@@ -57,7 +57,7 @@ func TestParseFromYamlAlloyDBAdmin(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-alloydb-admin-instance": alloydbadmin.Config{
Name: "my-alloydb-admin-instance",
Kind: alloydbadmin.SourceKind,
Type: alloydbadmin.SourceType,
UseClientOAuth: true,
},
},
@@ -29,14 +29,14 @@ import (
"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "alloydb-postgres"
const SourceType string = "alloydb-postgres"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -50,7 +50,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
Project string `yaml:"project" validate:"required"`
Region string `yaml:"region" validate:"required"`
Cluster string `yaml:"cluster" validate:"required"`

@@ -61,8 +61,8 @@ type Config struct {
Database string `yaml:"database" validate:"required"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
func (r Config) SourceConfigType() string {
return SourceType
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {

@@ -90,8 +90,8 @@ type Source struct {
Pool *pgxpool.Pool
}

func (s *Source) SourceKind() string {
return SourceKind
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {

@@ -183,7 +183,7 @@ func getConnectionConfig(ctx context.Context, user, pass, dbname string) (string

func initAlloyDBPgConnectionPool(ctx context.Context, tracer trace.Tracer, name, project, region, cluster, instance, ipType, user, pass, dbname string) (*pgxpool.Pool, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()

dsn, useIAM, err := getConnectionConfig(ctx, user, pass, dbname)

@@ -48,7 +48,7 @@ func TestParseFromYamlAlloyDBPg(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-pg-instance": alloydbpg.Config{
Name: "my-pg-instance",
Kind: alloydbpg.SourceKind,
Type: alloydbpg.SourceType,
Project: "my-project",
Region: "my-region",
Cluster: "my-cluster",

@@ -78,7 +78,7 @@ func TestParseFromYamlAlloyDBPg(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-pg-instance": alloydbpg.Config{
Name: "my-pg-instance",
Kind: alloydbpg.SourceKind,
Type: alloydbpg.SourceType,
Project: "my-project",
Region: "my-region",
Cluster: "my-cluster",

@@ -108,7 +108,7 @@ func TestParseFromYamlAlloyDBPg(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-pg-instance": alloydbpg.Config{
Name: "my-pg-instance",
Kind: alloydbpg.SourceKind,
Type: alloydbpg.SourceType,
Project: "my-project",
Region: "my-region",
Cluster: "my-cluster",
@@ -41,7 +41,7 @@ import (
"google.golang.org/api/option"
)

const SourceKind string = "bigquery"
const SourceType string = "bigquery"

// CloudPlatformScope is a broad scope for Google Cloud Platform services.
const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"

@@ -65,8 +65,8 @@ type BigQuerySessionProvider func(ctx context.Context) (*Session, error)
type DataplexClientCreator func(tokenString string) (*dataplexapi.CatalogClient, error)

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -81,7 +81,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
type Config struct {
// BigQuery configs
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
Project string `yaml:"project" validate:"required"`
Location string `yaml:"location"`
WriteMode string `yaml:"writeMode"`

@@ -89,6 +89,7 @@ type Config struct {
UseClientOAuth bool `yaml:"useClientOAuth"`
ImpersonateServiceAccount string `yaml:"impersonateServiceAccount"`
Scopes StringOrStringSlice `yaml:"scopes"`
MaxQueryResultRows int `yaml:"maxQueryResultRows"`
}

// StringOrStringSlice is a custom type that can unmarshal both a single string

@@ -118,15 +119,19 @@ func (s *StringOrStringSlice) UnmarshalYAML(unmarshal func(any) error) error {
return fmt.Errorf("cannot unmarshal %T into StringOrStringSlice", v)
}

func (r Config) SourceConfigKind() string {
func (r Config) SourceConfigType() string {
// Returns BigQuery source kind
return SourceKind
return SourceType
}
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
if r.WriteMode == "" {
r.WriteMode = WriteModeAllowed
}

if r.MaxQueryResultRows == 0 {
r.MaxQueryResultRows = 50
}

if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
// The protected mode only allows write operations to the session's temporary datasets.
// when using client OAuth, a new session is created every

@@ -150,7 +155,7 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
Client: client,
RestService: restService,
TokenSource: tokenSource,
MaxQueryResultRows: 50,
MaxQueryResultRows: r.MaxQueryResultRows,
ClientCreator: clientCreator,
}

@@ -297,9 +302,9 @@ type Session struct {
LastUsed time.Time
}

func (s *Source) SourceKind() string {
func (s *Source) SourceType() string {
// Returns BigQuery Google SQL source kind
return SourceKind
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {

@@ -567,7 +572,7 @@ func (s *Source) RunSQL(ctx context.Context, bqClient *bigqueryapi.Client, state
}

var out []any
for {
for s.MaxQueryResultRows <= 0 || len(out) < s.MaxQueryResultRows {
var val []bigqueryapi.Value
err = it.Next(&val)
if err == iterator.Done {
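The loop rewrite above is what enforces maxQueryResultRows: collection stops once the iterator is exhausted or the configured cap is reached, and a cap of zero or less means unlimited. A minimal, self-contained sketch of that bounded-drain pattern (using a made-up in-memory iterator rather than the BigQuery client) looks like this:

package main

import "fmt"

// fakeRows stands in for the BigQuery row iterator; it is purely illustrative.
type fakeRows struct {
	rows []int
	pos  int
}

// next returns the next row and false when the iterator is exhausted,
// roughly playing the role of it.Next / iterator.Done in the real code.
func (f *fakeRows) next() (int, bool) {
	if f.pos >= len(f.rows) {
		return 0, false
	}
	v := f.rows[f.pos]
	f.pos++
	return v, true
}

// drain collects rows until the iterator is done or the cap is hit;
// maxRows <= 0 disables the cap, mirroring MaxQueryResultRows.
func drain(it *fakeRows, maxRows int) []int {
	var out []int
	for maxRows <= 0 || len(out) < maxRows {
		v, ok := it.next()
		if !ok {
			break
		}
		out = append(out, v)
	}
	return out
}

func main() {
	fmt.Println(drain(&fakeRows{rows: []int{1, 2, 3, 4, 5}}, 3)) // [1 2 3]
	fmt.Println(drain(&fakeRows{rows: []int{1, 2, 3, 4, 5}}, 0)) // [1 2 3 4 5]
}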
@@ -660,7 +665,7 @@ func initBigQueryConnection(
impersonateServiceAccount string,
scopes []string,
) (*bigqueryapi.Client, *bigqueryrestapi.Service, oauth2.TokenSource, error) {
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()

userAgent, err := util.UserAgentFromContext(ctx)

@@ -736,7 +741,7 @@ func initBigQueryConnectionWithOAuthToken(
tokenString string,
wantRestService bool,
) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()
// Construct token source
token := &oauth2.Token{

@@ -796,7 +801,7 @@ func initDataplexConnection(
var clientCreator DataplexClientCreator
var err error

ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()

userAgent, err := util.UserAgentFromContext(ctx)
@@ -21,9 +21,12 @@ import (

yaml "github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"go.opentelemetry.io/otel/trace/noop"

"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/sources/bigquery"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/util"
)

func TestParseFromYamlBigQuery(t *testing.T) {

@@ -43,7 +46,7 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "",
WriteMode: "",

@@ -63,7 +66,7 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "asia",
WriteMode: "blocked",

@@ -84,7 +87,7 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "us",
UseClientOAuth: true,

@@ -105,7 +108,7 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "us",
AllowedDatasets: []string{"my_dataset"},

@@ -125,7 +128,7 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "us",
ImpersonateServiceAccount: "service-account@my-project.iam.gserviceaccount.com",

@@ -147,13 +150,33 @@ func TestParseFromYamlBigQuery(t *testing.T) {
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Kind: bigquery.SourceKind,
Type: bigquery.SourceType,
Project: "my-project",
Location: "us",
Scopes: []string{"https://www.googleapis.com/auth/bigquery", "https://www.googleapis.com/auth/cloud-platform"},
},
},
},
{
desc: "with max query result rows example",
in: `
sources:
  my-instance:
    kind: bigquery
    project: my-project
    location: us
    maxQueryResultRows: 10
`,
want: server.SourceConfigs{
"my-instance": bigquery.Config{
Name: "my-instance",
Type: bigquery.SourceType,
Project: "my-project",
Location: "us",
MaxQueryResultRows: 10,
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {

@@ -220,6 +243,59 @@ func TestFailParseFromYaml(t *testing.T) {
}
}

func TestInitialize_MaxQueryResultRows(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
ctx = util.WithUserAgent(ctx, "test-agent")
tracer := noop.NewTracerProvider().Tracer("")

tcs := []struct {
desc string
cfg bigquery.Config
want int
}{
{
desc: "default value",
cfg: bigquery.Config{
Name: "test-default",
Type: bigquery.SourceType,
Project: "test-project",
UseClientOAuth: true,
},
want: 50,
},
{
desc: "configured value",
cfg: bigquery.Config{
Name: "test-configured",
Type: bigquery.SourceType,
Project: "test-project",
UseClientOAuth: true,
MaxQueryResultRows: 100,
},
want: 100,
},
}

for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
src, err := tc.cfg.Initialize(ctx, tracer)
if err != nil {
t.Fatalf("Initialize failed: %v", err)
}
bqSrc, ok := src.(*bigquery.Source)
if !ok {
t.Fatalf("Expected *bigquery.Source, got %T", src)
}
if bqSrc.MaxQueryResultRows != tc.want {
t.Errorf("MaxQueryResultRows = %d, want %d", bqSrc.MaxQueryResultRows, tc.want)
}
})
}
}

func TestNormalizeValue(t *testing.T) {
tests := []struct {
name string
@@ -27,14 +27,14 @@ import (
"google.golang.org/api/option"
)

const SourceKind string = "bigtable"
const SourceType string = "bigtable"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -48,13 +48,13 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
Project string `yaml:"project" validate:"required"`
Instance string `yaml:"instance" validate:"required"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
func (r Config) SourceConfigType() string {
return SourceType
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {

@@ -77,8 +77,8 @@ type Source struct {
Client *bigtable.Client
}

func (s *Source) SourceKind() string {
return SourceKind
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {

@@ -179,7 +179,7 @@ func (s *Source) RunSQL(ctx context.Context, statement string, configParam param

func initBigtableClient(ctx context.Context, tracer trace.Tracer, name, project, instance string) (*bigtable.Client, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()

// Set up Bigtable data operations client.

@@ -43,7 +43,7 @@ func TestParseFromYamlBigtableDb(t *testing.T) {
want: map[string]sources.SourceConfig{
"my-bigtable-instance": bigtable.Config{
Name: "my-bigtable-instance",
Kind: bigtable.SourceKind,
Type: bigtable.SourceType,
Project: "my-project",
Instance: "my-instance",
},
@@ -25,11 +25,11 @@ import (
"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "cassandra"
const SourceType string = "cassandra"

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -43,7 +43,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
Hosts []string `yaml:"hosts" validate:"required"`
Keyspace string `yaml:"keyspace"`
ProtoVersion int `yaml:"protoVersion"`

@@ -68,9 +68,9 @@ func (c Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
return s, nil
}

// SourceConfigKind implements sources.SourceConfig.
func (c Config) SourceConfigKind() string {
return SourceKind
// SourceConfigType implements sources.SourceConfig.
func (c Config) SourceConfigType() string {
return SourceType
}

var _ sources.SourceConfig = Config{}

@@ -89,9 +89,9 @@ func (s *Source) ToConfig() sources.SourceConfig {
return s.Config
}

// SourceKind implements sources.Source.
func (s *Source) SourceKind() string {
return SourceKind
// SourceType implements sources.Source.
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) RunSQL(ctx context.Context, statement string, params parameters.ParamValues) (any, error) {

@@ -120,7 +120,7 @@ var _ sources.Source = &Source{}

func initCassandraSession(ctx context.Context, tracer trace.Tracer, c Config) (*gocql.Session, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, c.Name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, c.Name)
defer span.End()

// Validate authentication configuration

@@ -43,7 +43,7 @@ func TestParseFromYamlCassandra(t *testing.T) {
want: server.SourceConfigs{
"my-cassandra-instance": cassandra.Config{
Name: "my-cassandra-instance",
Kind: cassandra.SourceKind,
Type: cassandra.SourceType,
Hosts: []string{"my-host1", "my-host2"},
Username: "",
Password: "",

@@ -77,7 +77,7 @@ func TestParseFromYamlCassandra(t *testing.T) {
want: server.SourceConfigs{
"my-cassandra-instance": cassandra.Config{
Name: "my-cassandra-instance",
Kind: cassandra.SourceKind,
Type: cassandra.SourceType,
Hosts: []string{"my-host1", "my-host2"},
Username: "user",
Password: "pass",
@@ -28,14 +28,14 @@ import (
"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "clickhouse"
const SourceType string = "clickhouse"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -49,7 +49,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
Host string `yaml:"host" validate:"required"`
Port string `yaml:"port" validate:"required"`
Database string `yaml:"database" validate:"required"`

@@ -59,8 +59,8 @@ type Config struct {
Secure bool `yaml:"secure"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
func (r Config) SourceConfigType() string {
return SourceType
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {

@@ -88,8 +88,8 @@ type Source struct {
Pool *sql.DB
}

func (s *Source) SourceKind() string {
return SourceKind
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {

@@ -174,7 +174,7 @@ func validateConfig(protocol string) error {

func initClickHouseConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname, protocol string, secure bool) (*sql.DB, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
defer span.End()

if protocol == "" {

@@ -25,10 +25,10 @@ import (
"go.opentelemetry.io/otel"
)

func TestConfigSourceConfigKind(t *testing.T) {
func TestConfigSourceConfigType(t *testing.T) {
config := Config{}
if config.SourceConfigKind() != SourceKind {
t.Errorf("Expected %s, got %s", SourceKind, config.SourceConfigKind())
if config.SourceConfigType() != SourceType {
t.Errorf("Expected %s, got %s", SourceType, config.SourceConfigType())
}
}

@@ -53,7 +53,7 @@ func TestNewConfig(t *testing.T) {
`,
expected: Config{
Name: "test-clickhouse",
Kind: "clickhouse",
Type: "clickhouse",
Host: "localhost",
Port: "8443",
User: "default",

@@ -75,7 +75,7 @@ func TestNewConfig(t *testing.T) {
`,
expected: Config{
Name: "minimal-clickhouse",
Kind: "clickhouse",
Type: "clickhouse",
Host: "127.0.0.1",
Port: "8123",
User: "testuser",

@@ -100,7 +100,7 @@ func TestNewConfig(t *testing.T) {
`,
expected: Config{
Name: "http-clickhouse",
Kind: "clickhouse",
Type: "clickhouse",
Host: "clickhouse.example.com",
Port: "8123",
User: "analytics",

@@ -125,7 +125,7 @@ func TestNewConfig(t *testing.T) {
`,
expected: Config{
Name: "secure-clickhouse",
Kind: "clickhouse",
Type: "clickhouse",
Host: "secure.clickhouse.io",
Port: "8443",
User: "secureuser",

@@ -196,10 +196,10 @@ func TestNewConfigInvalidYAML(t *testing.T) {
}
}

func TestSource_SourceKind(t *testing.T) {
func TestSource_SourceType(t *testing.T) {
source := &Source{}
if source.SourceKind() != SourceKind {
t.Errorf("Expected %s, got %s", SourceKind, source.SourceKind())
if source.SourceType() != SourceType {
t.Errorf("Expected %s, got %s", SourceType, source.SourceType())
}
}
@@ -29,15 +29,15 @@ import (
"golang.org/x/oauth2/google"
)

const SourceKind string = "cloud-gemini-data-analytics"
const SourceType string = "cloud-gemini-data-analytics"
const Endpoint string = "https://geminidataanalytics.googleapis.com"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
if !sources.Register(SourceType, newConfig) {
panic(fmt.Sprintf("source type %q already registered", SourceType))
}
}

@@ -51,13 +51,13 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Type string `yaml:"kind" validate:"required"`
ProjectID string `yaml:"projectId" validate:"required"`
UseClientOAuth bool `yaml:"useClientOAuth"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
func (r Config) SourceConfigType() string {
return SourceType
}

// Initialize initializes a Gemini Data Analytics Source instance.

@@ -102,8 +102,8 @@ type Source struct {
userAgent string
}

func (s *Source) SourceKind() string {
return SourceKind
func (s *Source) SourceType() string {
return SourceType
}

func (s *Source) ToConfig() sources.SourceConfig {
Some files were not shown because too many files have changed in this diff.