Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-17 03:18:00 -05:00)

Compare commits: py-sdk-doc...healthcare (38 commits)
Commits in this comparison:

7ef303a4d5  96f13dd517  6e09b08c6a  1f15a111f1  dfddeb528d  00c3e6d8cb
d00b6fdf18  4d23a3bbf2  5e0999ebf5  6b02591703  8e0fb03483  2817dd1e5d
68a218407e  d69792d843  647b04d3a7  030df9766f  5dbf207162  9c3720e31d
3cd3c39d66  0691a6f715  467b96a23b  4abf0c39e7  dd7b9de623  41b518b955
6e8a9eb8ec  ef8f3b02f2  351b007fe3  9d1feca108  17b41f6453  a7799757c9
d961e373e1  bcb40a720d  4a4cf1e712  b706b5bc68  4d3332d37d  1203b7370a
4a26ce3c1b  306b5becda
.ci/integration.cloudbuild.yaml

@@ -825,7 +825,27 @@ steps:
           elasticsearch \
           elasticsearch

+  - id: "snowflake"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+      - "SNOWFLAKE_DATABASE=$_SNOWFLAKE_DATABASE"
+      - "SNOWFLAKE_SCHEMA=$_SNOWFLAKE_SCHEMA"
+    secretEnv: ["CLIENT_ID", "SNOWFLAKE_USER", "SNOWFLAKE_PASS", "SNOWFLAKE_ACCOUNT"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        .ci/test_with_coverage.sh \
+          "Snowflake" \
+          snowflake \
+          snowflake
+
   - id: "cassandra"
     name: golang:1
     waitFor: ["compile-test-binary"]

@@ -1038,6 +1058,12 @@ availableSecrets:
       env: ELASTICSEARCH_USER
     - versionName: projects/$PROJECT_ID/secrets/elastic_search_pass/versions/latest
      env: ELASTICSEARCH_PASS
+    - versionName: projects/$PROJECT_ID/secrets/snowflake_account/versions/latest
+      env: SNOWFLAKE_ACCOUNT
+    - versionName: projects/$PROJECT_ID/secrets/snowflake_user/versions/latest
+      env: SNOWFLAKE_USER
+    - versionName: projects/$PROJECT_ID/secrets/snowflake_pass/versions/latest
+      env: SNOWFLAKE_PASS
     - versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
       env: CASSANDRA_USER
     - versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest

@@ -1124,4 +1150,5 @@ substitutions:
   _SINGLESTORE_USER: "root"
   _MARIADB_PORT: "3307"
   _MARIADB_DATABASE: test_database
+  _SNOWFLAKE_DATABASE: "test"
+  _SNOWFLAKE_SCHEMA: "PUBLIC"
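The new Cloud Build step drives the same shared coverage wrapper that the other databases already call. Outside Cloud Build, an equivalent local run would look like the sketch below, assuming you export the substitution-backed defaults added above and supply the secret-backed values yourself:

```sh
# Substitution-backed defaults (added in this change)
export SNOWFLAKE_DATABASE="test"
export SNOWFLAKE_SCHEMA="PUBLIC"
# Secret Manager-backed values must also be present in the environment:
# CLIENT_ID, SNOWFLAKE_USER, SNOWFLAKE_PASS, SNOWFLAKE_ACCOUNT
.ci/test_with_coverage.sh "Snowflake" snowflake snowflake
```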
.gitignore (vendored, 2 changed lines)

@@ -20,4 +20,4 @@ node_modules

 # executable
 genai-toolbox
-toolbox
+toolbox

@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 # Add a new version block here before every release
 # The order of versions in this file is mirrored into the dropdown

+[[params.versions]]
+version = "v0.25.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
+
 [[params.versions]]
 version = "v0.24.0"
 url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
CHANGELOG.md (25 changed lines)

@@ -1,5 +1,30 @@
 # Changelog

+## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
+
+
+### Features
+
+* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
+* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
+* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
+* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
+* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
+* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
+* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
+* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
+* **tools/postgressql:** Add tool to list store procedure ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
+* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))
+
+
+### Bug Fixes
+
+* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
+* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
+* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
+* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
+* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))
+
 ## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
CONTRIBUTING.md

@@ -59,6 +59,13 @@ You can manually trigger the bot by commenting on your Pull Request:

 * `/gemini summary`: Posts a summary of the changes in the pull request.
 * `/gemini help`: Overview of the available commands

+## Guidelines for Pull Requests
+
+1. Please keep your PR small for more thorough review and easier updates. In case of regression, it also allows us to roll back a single feature instead of multiple ones.
+1. For non-trivial changes, consider opening an issue and discussing it with the code owners first.
+1. Provide a good PR description as a record of what change is being made and why it was made. Link to a GitHub issue if it exists.
+1. Make sure your code is thoroughly tested with unit tests and integration tests. Remember to clean up the test instances properly in your code to avoid memory leaks.
+
 ## Adding a New Database Source or Tool

 Please create an

@@ -110,6 +117,8 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s

 We recommend looking at an [example tool
 implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

+Remember to keep your PRs small. For example, if you are contributing a new Source, only include one or two core Tools within the same PR; the rest of the Tools can come in subsequent PRs.
+
 * **Create a new directory** under `internal/tools` for your tool type (e.g., `internal/tools/newdb/newdbtool`).
 * **Define a configuration struct** for your tool in a file named `newdbtool.go`.
   Create a `Config` struct and a `Tool` struct to store necessary parameters for

@@ -163,6 +172,8 @@ tools.
   parameters][temp-param-doc]. Only run this test if template
   parameters apply to your tool.

 * **Add additional tests** for the tools that are not covered by the predefined tests. Every tool must be tested!

+* **Add the new database to the integration test workflow** in
+  [integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
+
@@ -244,4 +255,4 @@ resources.

 * **PR Description:** PR description should **always** be included. It should
   include a concise description of the changes, its impact, along with a
   summary of the solution. If the PR is related to a specific issue, the issue
-  number should be mentioned in the PR description (e.g. `Fixes #1`).
+  number should be mentioned in the PR description (e.g. `Fixes #1`).
DEVELOPER.md (17 changed lines)

@@ -379,6 +379,23 @@ to approve PRs for main. TeamSync is used to create this team from the MDB

 Group `toolbox-contributors`. Googlers who are developing for MCP-Toolbox
 but aren't part of the core team should join this group.

+### Issue/PR Triage and SLO
+After an issue is created, maintainers will assign the following labels:
+* `Priority` (defaulted to P0)
+* `Type` (if applicable)
+* `Product` (if applicable)
+
+All incoming issues and PRs will adhere to the following SLO:
+| Type | Priority | Objective |
+|-----------------|----------|------------------------------------------------------------------------|
+| Feature Request | P0 | Must respond within **5 days** |
+| Process | P0 | Must respond within **5 days** |
+| Bugs | P0 | Must respond within **5 days**, and resolve/closure within **14 days** |
+| Bugs | P1 | Must respond within **7 days**, and resolve/closure within **90 days** |
+| Bugs | P2 | Must respond within **30 days** |
+
+_Types that are not listed in the table do not adhere to any SLO._
+
 ### Releasing

 Toolbox has two types of releases: versioned and continuous. It uses Google
README.md (16 changed lines)

@@ -140,7 +140,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 > chmod +x toolbox
 > ```

@@ -153,7 +153,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 > chmod +x toolbox
 > ```

@@ -166,7 +166,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 > chmod +x toolbox
 > ```

@@ -179,7 +179,7 @@ To install Toolbox as a binary:
 >
 > ```cmd
 > :: see releases page for other versions
-> set VERSION=0.24.0
+> set VERSION=0.25.0
 > curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
 > ```
 >

@@ -191,7 +191,7 @@ To install Toolbox as a binary:
 >
 > ```powershell
 > # see releases page for other versions
-> $VERSION = "0.24.0"
+> $VERSION = "0.25.0"
 > curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 > ```
 >

@@ -204,7 +204,7 @@ You can also install Toolbox as a container:

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```

@@ -228,7 +228,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:

 ```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
 ```
 <!-- {x-release-please-end} -->

@@ -272,7 +272,7 @@ To run Toolbox from binary:
 To run the server after pulling the [container image](#installing-the-server):

 ```sh
-export VERSION=0.11.0 # Use the version you pulled
+export VERSION=0.24.0 # Use the version you pulled
 docker run -p 5000:5000 \
   -v $(pwd)/tools.yaml:/app/tools.yaml \
   us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
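Whichever install path is used, a quick sanity check confirms the bump took effect (a sketch; it assumes the binary sits in the current directory):

```sh
./toolbox --version   # should report 0.25.0 for this release
```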
@@ -92,11 +92,13 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"

@@ -215,6 +217,8 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"

@@ -263,6 +267,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
+	_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"

@@ -378,7 +383,9 @@ func NewCommand(opts ...Option) *Command {
 	flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
 	flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
 	flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
+	// TODO: Insecure by default. Might consider updating this for v1.0.0
 	flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
+	flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")

 	// wrap RunE command so that we have access to original Command object
 	cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }

@@ -944,6 +951,7 @@ func run(cmd *Command) error {

 	cmd.cfg.SourceConfigs = finalToolsFile.Sources
 	cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
+	cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
 	cmd.cfg.ToolConfigs = finalToolsFile.Tools
 	cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
 	cmd.cfg.PromptConfigs = finalToolsFile.Prompts
@@ -67,6 +67,9 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
 	if c.AllowedOrigins == nil {
 		c.AllowedOrigins = []string{"*"}
 	}
+	if c.AllowedHosts == nil {
+		c.AllowedHosts = []string{"*"}
+	}
 	return c
 }

@@ -220,6 +223,13 @@ func TestServerConfigFlags(t *testing.T) {
 				AllowedOrigins: []string{"http://foo.com", "http://bar.com"},
 			}),
 		},
+		{
+			desc: "allowed hosts",
+			args: []string{"--allowed-hosts", "http://foo.com,http://bar.com"},
+			want: withDefaults(server.ServerConfig{
+				AllowedHosts: []string{"http://foo.com", "http://bar.com"},
+			}),
+		},
 	}
 	for _, tc := range tcs {
 		t.Run(tc.desc, func(t *testing.T) {

@@ -1352,6 +1362,7 @@ func TestPrebuiltTools(t *testing.T) {
 	cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability")
 	serverless_spark_config, _ := prebuiltconfigs.Get("serverless-spark")
 	cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare")
+	snowflake_config, _ := prebuiltconfigs.Get("snowflake")

 	// Set environment variables
 	t.Setenv("API_KEY", "your_api_key")

@@ -1449,6 +1460,14 @@ func TestPrebuiltTools(t *testing.T) {
 	t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region")
 	t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset")

+	t.Setenv("SNOWFLAKE_ACCOUNT", "your_account")
+	t.Setenv("SNOWFLAKE_USER", "your_username")
+	t.Setenv("SNOWFLAKE_PASSWORD", "your_pass")
+	t.Setenv("SNOWFLAKE_DATABASE", "your_db")
+	t.Setenv("SNOWFLAKE_SCHEMA", "your_schema")
+	t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh")
+	t.Setenv("SNOWFLAKE_ROLE", "your_role")
+
 	ctx, err := testutils.ContextWithNewLogger()
 	if err != nil {
 		t.Fatalf("unexpected error: %s", err)

@@ -1474,7 +1493,7 @@ func TestPrebuiltTools(t *testing.T) {
 		wantToolset: server.ToolsetConfigs{
 			"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
 				Name: "cloud_sql_postgres_admin_tools",
-				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance"},
+				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"},
 			},
 		},
 	},

@@ -1484,7 +1503,7 @@ func TestPrebuiltTools(t *testing.T) {
 		wantToolset: server.ToolsetConfigs{
 			"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
 				Name: "cloud_sql_mysql_admin_tools",
-				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
+				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
 			},
 		},
 	},

@@ -1494,7 +1513,7 @@ func TestPrebuiltTools(t *testing.T) {
 		wantToolset: server.ToolsetConfigs{
 			"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
 				Name: "cloud_sql_mssql_admin_tools",
-				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
+				ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
 			},
 		},
 	},

@@ -1746,6 +1765,16 @@ func TestPrebuiltTools(t *testing.T) {
 			},
 		},
 	},
+	{
+		name: "Snowflake prebuilt tool",
+		in:   snowflake_config,
+		wantToolset: server.ToolsetConfigs{
+			"snowflake_tools": tools.ToolsetConfig{
+				Name:      "snowflake_tools",
+				ToolNames: []string{"execute_sql", "list_tables"},
+			},
+		},
+	},
 }

 for _, tc := range tcs {
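The new test case suggests the prebuilt Snowflake config is keyed the same way as `prebuiltconfigs.Get("snowflake")`. A hedged sketch of exercising it from the CLI, with placeholder environment variables mirroring the ones the test sets:

```sh
# placeholder values, matching the names used in TestPrebuiltTools
export SNOWFLAKE_ACCOUNT="your_account" SNOWFLAKE_USER="your_username" \
       SNOWFLAKE_PASSWORD="your_pass" SNOWFLAKE_DATABASE="your_db" \
       SNOWFLAKE_SCHEMA="your_schema" SNOWFLAKE_WAREHOUSE="your_wh" \
       SNOWFLAKE_ROLE="your_role"
# assumes --prebuilt accepts the same key; serves the snowflake_tools toolset
./toolbox --prebuilt snowflake
```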
@@ -1 +1 @@
-0.24.0
+0.25.0
docs/en/blogs/_index.md (new file, 18 lines)

@@ -0,0 +1,18 @@
+---
+title: "Featured Articles"
+weight: 3
+description: Toolbox Medium Blogs
+manualLink: "https://medium.com/@mcp_toolbox"
+manualLinkTarget: _blank
+---
+
+<html>
+<head>
+<title>Redirecting to Featured Articles</title>
+<link rel="canonical" href="https://medium.com/@mcp_toolbox"/>
+<meta http-equiv="refresh" content="0;url=https://medium.com/@mcp_toolbox"/>
+</head>
+<body>
+<p>If you are not automatically redirected, please <a href="https://medium.com/@mcp_toolbox">follow this link to our articles</a>.</p>
+</body>
+</html>
@@ -234,7 +234,7 @@
     },
     "outputs": [],
     "source": [
-      "version = \"0.24.0\" # x-release-please-version\n",
+      "version = \"0.25.0\" # x-release-please-version\n",
       "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
       "\n",
       "# Make the binary executable\n",
@@ -103,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 chmod +x toolbox
 ```

@@ -114,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 chmod +x toolbox
 ```

@@ -125,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 chmod +x toolbox
 ```

@@ -136,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):

 ```cmd
 :: see releases page for other versions
-set VERSION=0.24.0
+set VERSION=0.25.0
 curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
 ```

@@ -146,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):

 ```powershell
 # see releases page for other versions
-$VERSION = "0.24.0"
+$VERSION = "0.25.0"
 curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 ```

@@ -158,7 +158,7 @@ You can also install Toolbox as a container:

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```

@@ -177,7 +177,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:

 ```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
 ```

 {{% /tab %}}
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a

 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
@@ -569,11 +569,12 @@
     }
   },
   "node_modules/jws": {
-    "version": "4.0.0",
-    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-    "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+    "version": "4.0.1",
+    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+    "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
     "license": "MIT",
     "dependencies": {
-      "jwa": "^2.0.0",
+      "jwa": "^2.0.1",
       "safe-buffer": "^5.0.1"
     }
   },

@@ -882,11 +882,12 @@
     }
   },
   "node_modules/jws": {
-    "version": "4.0.0",
-    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-    "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+    "version": "4.0.1",
+    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+    "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
     "license": "MIT",
     "dependencies": {
-      "jwa": "^2.0.0",
+      "jwa": "^2.0.1",
       "safe-buffer": "^5.0.1"
     }
   },
@@ -1,3 +1,3 @@
-google-genai==1.56.0
+google-genai==1.57.0
 toolbox-core==0.5.4
 pytest==9.0.2
@@ -1,5 +1,5 @@
-langchain==1.2.0
-langchain-google-vertexai==3.2.0
+langchain==1.2.2
+langchain-google-vertexai==3.2.1
 langgraph==1.0.5
 toolbox-langchain==0.5.4
 pytest==9.0.2
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a

 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
@@ -48,11 +48,13 @@ instance, database and users:

 * `roles/cloudsql.editor`: Provides permissions to manage existing resources.
   * All `viewer` tools
   * `create_database`
+  * `create_backup`
 * `roles/cloudsql.admin`: Provides full control over all resources.
   * All `editor` and `viewer` tools
   * `create_instance`
   * `create_user`
   * `clone_instance`
+  * `restore_backup`

 ## Install MCP Toolbox

@@ -299,6 +301,8 @@ instances and interacting with your database:

 * **create_user**: Creates a new user in a Cloud SQL instance.
 * **wait_for_operation**: Waits for a Cloud SQL operation to complete.
 * **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
+* **create_backup**: Creates a backup on a Cloud SQL instance.
+* **restore_backup**: Restores a backup of a Cloud SQL instance.

 {{< notice note >}}
 Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,11 +48,13 @@ database and users:

 * `roles/cloudsql.editor`: Provides permissions to manage existing resources.
   * All `viewer` tools
   * `create_database`
+  * `create_backup`
 * `roles/cloudsql.admin`: Provides full control over all resources.
   * All `editor` and `viewer` tools
   * `create_instance`
   * `create_user`
   * `clone_instance`
+  * `restore_backup`

 ## Install MCP Toolbox

@@ -299,6 +301,8 @@ instances and interacting with your database:

 * **create_user**: Creates a new user in a Cloud SQL instance.
 * **wait_for_operation**: Waits for a Cloud SQL operation to complete.
 * **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
+* **create_backup**: Creates a backup on a Cloud SQL instance.
+* **restore_backup**: Restores a backup of a Cloud SQL instance.

 {{< notice note >}}
 Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,11 +48,13 @@ instance, database and users:

 * `roles/cloudsql.editor`: Provides permissions to manage existing resources.
   * All `viewer` tools
   * `create_database`
+  * `create_backup`
 * `roles/cloudsql.admin`: Provides full control over all resources.
   * All `editor` and `viewer` tools
   * `create_instance`
   * `create_user`
   * `clone_instance`
+  * `restore_backup`

 ## Install MCP Toolbox

@@ -299,6 +301,8 @@ instances and interacting with your database:

 * **create_user**: Creates a new user in a Cloud SQL instance.
 * **wait_for_operation**: Waits for a Cloud SQL operation to complete.
 * **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
+* **create_backup**: Creates a backup on a Cloud SQL instance.
+* **restore_backup**: Restores a backup of a Cloud SQL instance.

 {{< notice note >}}
 Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a

 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->

@@ -45,19 +45,19 @@ instance:

 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:

 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:

 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).

 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:

 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -20,6 +20,7 @@ The native SDKs can be combined with MCP clients in many cases.

 Toolbox currently supports the following versions of MCP specification:

+* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
 * [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
 * [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
 * [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
@@ -68,7 +68,12 @@ networks:
 ```

 {{< notice tip >}}
-To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
+To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
 list of hosts for validation. E.g. `command: [ "toolbox",
 "--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
 "--allowed-hosts", "localhost:5000"]`
+
+To implement CORs, use the `--allowed-origins` flag to specify a
+list of origins permitted to access the server. E.g. `command: [ "toolbox",
+"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
+"--allowed-origins", "https://foo.bar"]`

@@ -188,9 +188,13 @@ description: >
     path: tools.yaml
 ```

 {{< notice tip >}}
+To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
+list of hosts for validation. E.g. `args: ["--address",
+"0.0.0.0", "--allowed-hosts", "foo.bar:5000"]`
+
 To implement CORs, use the `--allowed-origins` flag to specify a
 list of origins permitted to access the server. E.g. `args: ["--address",
 "0.0.0.0", "--allowed-origins", "https://foo.bar"]`
 {{< /notice >}}
@@ -142,14 +142,18 @@ deployment will time out.

 ### Update deployed server to be secure

-To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
-list of origins permitted to access the server. In order to do that, you will
+To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
+list of hosts. In order to do that, you will
 have to re-deploy the cloud run service with the new flag.

+To implement CORs checks, use the `--allowed-origins` flag to specify a list of
+origins permitted to access the server.
+
 1. Set an environment variable to the cloud run url:

    ```bash
    export URL=<cloud run url>
+   export HOST=<cloud run host>
    ```

 2. Redeploy Toolbox:

@@ -160,7 +164,7 @@ have to re-deploy the cloud run service with the new flag.
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
-       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL"
+       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST"
        # --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
    ```

@@ -172,7 +176,7 @@ have to re-deploy the cloud run service with the new flag.
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
-       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL" \
+       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST" \
        # TODO(dev): update the following to match your VPC if necessary
        --network default \
        --subnet default

@@ -203,6 +207,7 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.

 {{< tab header="Python" lang="python" >}}
 import asyncio
 from toolbox_core import ToolboxClient, auth_methods
+from toolbox_core.protocol import Protocol

 # Replace with the Cloud Run service URL generated in the previous step
 URL = "https://cloud-run-url.app"

@@ -213,6 +218,7 @@ async def main():
     async with ToolboxClient(
         URL,
         client_headers={"Authorization": auth_token_provider},
+        protocol=Protocol.TOOLBOX,
     ) as toolbox:
         toolset = await toolbox.load_toolset()
         # ...

@@ -277,3 +283,5 @@ contain the specific error message needed to diagnose the problem.
   Manager, it means the Toolbox service account is missing permissions.
   - Ensure the `toolbox-identity` service account has the **Secret Manager
     Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
+
+- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.
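After the redeploy, a quick smoke test against the service can confirm it still answers (a sketch: `$URL` is the value exported earlier, and an identity token is needed because the service was not deployed with `--allow-unauthenticated`):

```sh
curl -H "Authorization: Bearer $(gcloud auth print-identity-token)" "$URL"
```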
@@ -8,25 +8,26 @@ description: >

 ## Reference

 | Flag (Short) | Flag (Long) | Description | Default |
 |--------------|----------------------------|-------------|-------------|
 | `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
 | | `--disable-reload` | Disables dynamic reloading of tools file. | |
 | `-h` | `--help` | help for toolbox | |
 | | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
 | | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
 | `-p` | `--port` | Port the server will listen on. | `5000` |
 | | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
 | | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
 | | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
 | | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
 | | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
 | | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
 | | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
 | | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
 | | `--ui` | Launches the Toolbox UI web server. | |
-| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
+| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
+| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
 | `-v` | `--version` | version for toolbox | |
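Combining the two new rows, a locked-down invocation might pass both flags together (the host and origin values below are placeholders):

```sh
./toolbox --tools-file tools.yaml --address 0.0.0.0 \
  --allowed-origins "https://app.example.com" \
  --allowed-hosts "toolbox.example.com:5000"
```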

 ## Examples
@@ -187,12 +187,14 @@ See [Usage Examples](../reference/cli.md#examples).

     manage existing resources.
     * All `viewer` tools
     * `create_database`
+    * `create_backup`
   * **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
     all resources.
     * All `editor` and `viewer` tools
     * `create_instance`
     * `create_user`
     * `clone_instance`
+    * `restore_backup`

 * **Tools:**
   * `create_instance`: Creates a new Cloud SQL for MySQL instance.

@@ -203,6 +205,8 @@ See [Usage Examples](../reference/cli.md#examples).

   * `create_user`: Creates a new user in a Cloud SQL instance.
   * `wait_for_operation`: Waits for a Cloud SQL operation to complete.
   * `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
+  * `create_backup`: Creates a backup on a Cloud SQL instance.
+  * `restore_backup`: Restores a backup of a Cloud SQL instance.

 ## Cloud SQL for PostgreSQL

@@ -275,12 +279,14 @@ See [Usage Examples](../reference/cli.md#examples).

     manage existing resources.
     * All `viewer` tools
     * `create_database`
+    * `create_backup`
   * **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
     all resources.
     * All `editor` and `viewer` tools
     * `create_instance`
     * `create_user`
     * `clone_instance`
+    * `restore_backup`
 * **Tools:**
   * `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
   * `get_instance`: Gets information about a Cloud SQL instance.

@@ -290,6 +296,8 @@ See [Usage Examples](../reference/cli.md#examples).

   * `create_user`: Creates a new user in a Cloud SQL instance.
   * `wait_for_operation`: Waits for a Cloud SQL operation to complete.
   * `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
+  * `create_backup`: Creates a backup on a Cloud SQL instance.
+  * `restore_backup`: Restores a backup of a Cloud SQL instance.

 ## Cloud SQL for SQL Server

@@ -336,12 +344,14 @@ See [Usage Examples](../reference/cli.md#examples).

     manage existing resources.
     * All `viewer` tools
     * `create_database`
+    * `create_backup`
   * **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
     all resources.
     * All `editor` and `viewer` tools
     * `create_instance`
     * `create_user`
     * `clone_instance`
+    * `restore_backup`
 * **Tools:**
   * `create_instance`: Creates a new Cloud SQL for SQL Server instance.
   * `get_instance`: Gets information about a Cloud SQL instance.

@@ -351,6 +361,8 @@ See [Usage Examples](../reference/cli.md#examples).

   * `create_user`: Creates a new user in a Cloud SQL instance.
   * `wait_for_operation`: Waits for a Cloud SQL operation to complete.
   * `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
+  * `create_backup`: Creates a backup on a Cloud SQL instance.
+  * `restore_backup`: Restores a backup of a Cloud SQL instance.

 ## Dataplex
@@ -134,6 +134,7 @@ sources:
     # scopes: # Optional: List of OAuth scopes to request.
     #   - "https://www.googleapis.com/auth/bigquery"
     #   - "https://www.googleapis.com/auth/drive.readonly"
+    # maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
 ```

 Initialize a BigQuery source that uses the client's access token:

@@ -153,6 +154,7 @@ sources:
     # scopes: # Optional: List of OAuth scopes to request.
     #   - "https://www.googleapis.com/auth/bigquery"
     #   - "https://www.googleapis.com/auth/drive.readonly"
+    # maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
 ```

 ## Reference

@@ -167,3 +169,4 @@ sources:
 | useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
 | scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
 | impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |
+| maxQueryResultRows | int | false | The maximum number of rows to return from a query. Defaults to 50. |
docs/en/resources/sources/snowflake.md (new file, 63 lines)

@@ -0,0 +1,63 @@
+---
+title: "Snowflake"
+type: docs
+weight: 1
+description: >
+  Snowflake is a cloud-based data platform.
+---
+
+## About
+
+[Snowflake][sf-docs] is a cloud data platform that provides a data warehouse-as-a-service designed for the cloud.
+
+[sf-docs]: https://docs.snowflake.com/
+
+## Available Tools
+
+- [`snowflake-sql`](../tools/snowflake/snowflake-sql.md)
+  Execute SQL queries as prepared statements in Snowflake.
+
+- [`snowflake-execute-sql`](../tools/snowflake/snowflake-execute-sql.md)
+  Run parameterized SQL statements in Snowflake.
+
+## Requirements
+
+### Database User
+
+This source only uses standard authentication. You will need to create a
+Snowflake user to login to the database with.
+
+## Example
+
+```yaml
+sources:
+  my-sf-source:
+    kind: snowflake
+    account: ${SNOWFLAKE_ACCOUNT}
+    user: ${SNOWFLAKE_USER}
+    password: ${SNOWFLAKE_PASSWORD}
+    database: ${SNOWFLAKE_DATABASE}
+    schema: ${SNOWFLAKE_SCHEMA}
+    warehouse: ${SNOWFLAKE_WAREHOUSE}
+    role: ${SNOWFLAKE_ROLE}
+```
+
+{{< notice tip >}}
+Use environment variable replacement with the format ${ENV_NAME}
+instead of hardcoding your secrets into the configuration file.
+{{< /notice >}}
+
+## Reference
+
+| **field** | **type** | **required** | **description** |
+|-----------|:--------:|:------------:|------------------------------------------------------------------------|
+| kind | string | true | Must be "snowflake". |
+| account | string | true | Your Snowflake account identifier. |
+| user | string | true | Name of the Snowflake user to connect as (e.g. "my-sf-user"). |
+| password | string | true | Password of the Snowflake user (e.g. "my-password"). |
+| database | string | true | Name of the Snowflake database to connect to (e.g. "my_db"). |
+| schema | string | true | Name of the schema to use (e.g. "my_schema"). |
+| warehouse | string | false | The virtual warehouse to use. Defaults to "COMPUTE_WH". |
+| role | string | false | The security role to use. Defaults to "ACCOUNTADMIN". |
+| timeout | integer | false | The connection timeout in seconds. Defaults to 60. |
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------------|:--------:|:------------:|------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "trino". |
|
||||
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
|
||||
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
|
||||
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
|
||||
| password | string | false | Password for basic authentication |
|
||||
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
|
||||
| schema | string | true | Default schema to use for queries (e.g. "default") |
|
||||
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
|
||||
| accessToken | string | false | JWT access token for authentication |
|
||||
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
|
||||
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ---------------------- | :------: | :----------: | ---------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "trino". |
|
||||
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
|
||||
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
|
||||
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
|
||||
| password | string | false | Password for basic authentication |
|
||||
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
|
||||
| schema | string | true | Default schema to use for queries (e.g. "default") |
|
||||
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
|
||||
| accessToken | string | false | JWT access token for authentication |
|
||||
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
|
||||
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
|
||||
| disableSslVerification | boolean | false | Skip SSL/TLS certificate verification (default: false) |
|
||||
| sslCertPath | string | false | Path to a custom SSL/TLS certificate file |
|
||||
| sslCert | string | false | Custom SSL/TLS certificate content |
|
||||
|
||||
@@ -91,8 +91,8 @@ visible to the LLM.
https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview

{{< notice tip >}} Make sure to enable the `parameterized_views` extension
before running this tool. You can do so by running this command in the AlloyDB
studio:
to utilize PSV feature (`nlConfigParameters`) with this tool. You can do so by
running this command in the AlloyDB studio:

```sql
CREATE EXTENSION IF NOT EXISTS parameterized_views;
@@ -12,6 +12,9 @@ aliases:

The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).

> [!NOTE]
> Only `alloydb`, `spannerReference`, and `cloudSqlReference` are supported as [datasource references](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1beta/projects.locations.dataAgents#DatasourceReferences).

## Example

```yaml
@@ -41,13 +44,13 @@ tools:

### Usage Flow

When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
When using this tool, a `query` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.

The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).

See [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.
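For illustration, a minimal sketch of invoking such a tool from Python with the `toolbox-core` client; the tool name `ask_data_insights` is hypothetical, while the `query` parameter matches the definition above:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes a Toolbox server running locally on the default port.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        # "ask_data_insights" is a hypothetical tool name; use the name
        # from your own tools.yaml. The input parameter is named `query`.
        tool = await toolbox.load_tool("ask_data_insights")
        result = await tool(query="How many accounts in Prague are eligible for loans?")
        print(result)


asyncio.run(main())
```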

**Example Input Prompt:**
**Example Input Query:**

```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.

45 docs/en/resources/tools/cloudsql/cloudsqlcreatebackup.md Normal file
@@ -0,0 +1,45 @@
---
title: cloud-sql-create-backup
type: docs
weight: 10
description: "Creates a backup on a Cloud SQL instance."
---

The `cloud-sql-create-backup` tool creates an on-demand backup on a Cloud SQL instance using the Cloud SQL Admin API.

{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}

## Examples

Basic backup creation (current state):

```yaml
tools:
  backup-creation-basic:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
    description: "Creates a backup on the given Cloud SQL instance."
```
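
A minimal sketch of invoking this tool from Python with the `toolbox-core` client; the project and instance values are placeholders:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Assumes a Toolbox server running locally with the tool above configured.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("backup-creation-basic")
        # `project` and `instance` are the required tool inputs listed below.
        result = await tool(project="my-project", instance="my-instance")
        print(result)


asyncio.run(main())
```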

## Reference

### Tool Configuration

| **field**   | **type** | **required** | **description**                                  |
| ----------- | :------: | :----------: | ------------------------------------------------ |
| kind        | string   | true         | Must be "cloud-sql-create-backup".               |
| source      | string   | true         | The name of the `cloud-sql-admin` source to use. |
| description | string   | false        | A description of the tool.                       |

### Tool Inputs

| **parameter**      | **type** | **required** | **description**                                                                 |
| ------------------ | :------: | :----------: | ------------------------------------------------------------------------------- |
| project            | string   | true         | The project ID.                                                                 |
| instance           | string   | true         | The name of the instance to take a backup on. Does not include the project ID. |
| location           | string   | false        | (Optional) Location of the backup run.                                         |
| backup_description | string   | false        | (Optional) The description of this backup run.                                 |

## See Also

- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Backup API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/backups)
53 docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md Normal file
@@ -0,0 +1,53 @@

---
title: cloud-sql-restore-backup
type: docs
weight: 10
description: "Restores a backup of a Cloud SQL instance."
---

The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API.

{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}

## Examples

Basic backup restore:

```yaml
tools:
  backup-restore-basic:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source
    description: "Restores a backup onto the given Cloud SQL instance."
```

## Reference

### Tool Configuration

| **field**   | **type** | **required** | **description**                                  |
| ----------- | :------: | :----------: | ------------------------------------------------ |
| kind        | string   | true         | Must be "cloud-sql-restore-backup".              |
| source      | string   | true         | The name of the `cloud-sql-admin` source to use. |
| description | string   | false        | A description of the tool.                       |

### Tool Inputs

| **parameter**   | **type** | **required** | **description**                                                              |
| --------------- | :------: | :----------: | ------------------------------------------------------------------------------|
| target_project  | string   | true         | The project ID of the instance to restore the backup onto.                   |
| target_instance | string   | true         | The instance to restore the backup onto. Does not include the project ID.    |
| backup_id       | string   | true         | The identifier of the backup being restored.                                 |
| source_project  | string   | false        | (Optional) The project ID of the instance that the backup belongs to.        |
| source_instance | string   | false        | (Optional) Cloud SQL instance ID of the instance that the backup belongs to. |

## Usage Notes

- The `backup_id` field can be a BackupRun ID (which will be an int64), a backup name, or a BackupDR backup name.
- If the `backup_id` field contains a BackupRun ID (i.e. an int64), the optional fields `source_project` and `source_instance` must also be provided, as in the sketch below.
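
A minimal sketch of that case from Python with the `toolbox-core` client; all project, instance, and ID values are placeholders:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("backup-restore-basic")
        # backup_id here is a numeric BackupRun ID, so source_project
        # and source_instance must be supplied alongside it.
        result = await tool(
            target_project="my-project",
            target_instance="my-instance",
            backup_id="1234567890",
            source_project="my-project",
            source_instance="my-source-instance",
        )
        print(result)


asyncio.run(main())
```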

## See Also

- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Restore API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/restoring)
@@ -30,6 +30,10 @@ following config for example:
- name: userNames
  type: array
  description: The user names to be set.
  items:
    name: userName # the item name doesn't matter but it has to exist
    type: string
    description: username
```
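
If such a tool is invoked from Python via the `toolbox-core` client, the array parameter is passed as a plain list; a minimal sketch (the tool name `set_users` is hypothetical):

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        # "set_users" is a hypothetical name for a tool configured with the
        # userNames array parameter shown above.
        tool = await toolbox.load_tool("set_users")
        result = await tool(userNames=["Alice", "Sid", "Bob"])
        print(result)


asyncio.run(main())
```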

If the input is an array of strings `["Alice", "Sid", "Bob"]`, the final command

7 docs/en/resources/tools/snowflake/_index.md Normal file
@@ -0,0 +1,7 @@

---
title: "Snowflake"
type: docs
weight: 1
description: >
  Tools that work with Snowflake Sources.
---

40 docs/en/resources/tools/snowflake/snowflake-execute-sql.md Normal file
@@ -0,0 +1,40 @@
---
title: "snowflake-execute-sql"
type: docs
weight: 1
description: >
  A "snowflake-execute-sql" tool executes a SQL statement against a Snowflake
  database.
---

## About

A `snowflake-execute-sql` tool executes a SQL statement against a Snowflake
database. It's compatible with any of the following sources:

- [snowflake](../../sources/snowflake.md)

`snowflake-execute-sql` takes one input parameter, `sql`, and runs the SQL
statement against the `source`.

> **Note:** This tool is intended for developer assistant workflows with
> human-in-the-loop and shouldn't be used for production agents.

## Example

```yaml
tools:
  execute_sql_tool:
    kind: snowflake-execute-sql
    source: my-snowflake-instance
    description: Use this tool to execute SQL statements.
```
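
A minimal sketch of invoking this tool from Python with the `toolbox-core` client, assuming a local Toolbox server:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("execute_sql_tool")
        # The single `sql` input carries the statement to run.
        result = await tool(sql="SELECT CURRENT_VERSION()")
        print(result)


asyncio.run(main())
```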

## Reference

| **field**    | **type**      | **required** | **description**                                           |
|--------------|:-------------:|:------------:|------------------------------------------------------------|
| kind         | string        | true         | Must be "snowflake-execute-sql".                          |
| source       | string        | true         | Name of the source the SQL should execute on.             |
| description  | string        | true         | Description of the tool that is passed to the LLM.        |
| authRequired | array[string] | false        | List of auth services that are required to use this tool. |

103 docs/en/resources/tools/snowflake/snowflake-sql.md Normal file
@@ -0,0 +1,103 @@
---
title: "snowflake-sql"
type: docs
weight: 1
description: >
  A "snowflake-sql" tool executes a pre-defined SQL statement against a
  Snowflake database.
---

## About

A `snowflake-sql` tool executes a pre-defined SQL statement against a Snowflake
database. It's compatible with any of the following sources:

- [snowflake](../../sources/snowflake.md)

The specified SQL statement is executed as a prepared statement, and specified
parameters will be inserted according to their position: e.g. `:1` will be the
first parameter specified, `:2` will be the second parameter, and so on.

> **Note:** This tool uses parameterized queries to prevent SQL injections.
> Query parameters can be used as substitutes for arbitrary expressions.
> Parameters cannot be used as substitutes for identifiers, column names, table
> names, or other parts of the query.

## Example

```yaml
tools:
  search_flights_by_number:
    kind: snowflake-sql
    source: my-snowflake-instance
    statement: |
      SELECT * FROM flights
      WHERE airline = :1
      AND flight_number = :2
      LIMIT 10
    description: |
      Use this tool to get information for a specific flight.
      Takes an airline code and flight number and returns info on the flight.
      Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
      An airline code is a code for an airline service consisting of a two-character
      airline designator followed by a flight number, which is a 1 to 4 digit number.
      For example, if given CY 0123, the airline is "CY", and flight_number is "123".
      Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
      If the tool returns more than one option, choose the date closest to today.
      Example:
      {{
        "airline": "CY",
        "flight_number": "888",
      }}
      Example:
      {{
        "airline": "DL",
        "flight_number": "1234",
      }}
    parameters:
      - name: airline
        type: string
        description: Airline unique 2 letter identifier
      - name: flight_number
        type: string
        description: 1 to 4 digit number
```
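
A minimal sketch of invoking this tool from Python with the `toolbox-core` client; the parameter names match the definition above:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("search_flights_by_number")
        # Arguments are bound to :1 and :2 in the prepared statement.
        result = await tool(airline="CY", flight_number="888")
        print(result)


asyncio.run(main())
```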

### Example with Template Parameters

> **Note:** This tool allows direct modifications to the SQL statement,
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](..#template-parameters).

```yaml
tools:
  list_table:
    kind: snowflake-sql
    source: my-snowflake-instance
    statement: |
      SELECT * FROM {{.tableName}};
    description: |
      Use this tool to list all information from a specific table.
      Example:
      {{
        "tableName": "flights",
      }}
    templateParameters:
      - name: tableName
        type: string
        description: Table to select from
```

## Reference

| **field**          | **type**                                     | **required** | **description**                                                                                                                        |
|--------------------|:--------------------------------------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------|
| kind               | string                                       | true         | Must be "snowflake-sql".                                                                                                               |
| source             | string                                       | true         | Name of the source the SQL should execute on.                                                                                          |
| description        | string                                       | true         | Description of the tool that is passed to the LLM.                                                                                     |
| statement          | string                                       | true         | SQL statement to execute.                                                                                                              |
| parameters         | [parameters](../#specifying-parameters)      | false        | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement.                                          |
| templateParameters | [templateParameters](..#template-parameters) | false        | List of [templateParameters](..#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| authRequired       | array[string]                                | false        | List of auth services that are required to use this tool.                                                                              |
@@ -16,11 +16,7 @@ database. It's compatible with any of the following sources:

- [trino](../../sources/trino.md)

The specified SQL statement is executed as a [prepared statement][trino-prepare],
and specified parameters will be inserted according to their position: e.g. `$1`
will be the first parameter specified, `$2` will be the second parameter, and so
on. If template parameters are included, they will be resolved before execution
of the prepared statement.
The specified SQL statement is executed as a [prepared statement][trino-prepare], and expects parameters in the SQL query to be in the form of placeholders `?`.

[trino-prepare]: https://trino.io/docs/current/sql/prepare.html

@@ -38,8 +34,8 @@ tools:
    source: my-trino-instance
    statement: |
      SELECT * FROM hive.sales.orders
      WHERE region = $1
      AND order_date >= DATE($2)
      WHERE region = ?
      AND order_date >= DATE(?)
      LIMIT 10
    description: |
      Use this tool to get information for orders in a specific region.
@@ -771,7 +771,7 @@
    },
    "outputs": [],
    "source": [
     "version = \"0.24.0\" # x-release-please-version\n",
     "version = \"0.25.0\" # x-release-please-version\n",
     "! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
export VERSION="0.24.0"
export VERSION="0.25.0"
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -220,7 +220,7 @@
    },
    "outputs": [],
    "source": [
     "version = \"0.24.0\" # x-release-please-version\n",
     "version = \"0.25.0\" # x-release-please-version\n",
     "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",

@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
7 docs/en/samples/neo4j/_index.md Normal file
@@ -0,0 +1,7 @@

---
title: "Neo4j"
type: docs
weight: 1
description: >
  How to get started with Toolbox using Neo4j.
---

141 docs/en/samples/neo4j/mcp_quickstart.md Normal file
@@ -0,0 +1,141 @@
---
title: "Quickstart (MCP with Neo4j)"
type: docs
weight: 1
description: >
  How to get started running Toolbox with MCP Inspector and Neo4j as the source.
---

## Overview

[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol that standardizes how applications provide context to LLMs. Check out this page on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).

## Step 1: Set up your Neo4j Database and Data

In this section, you'll set up a database and populate it with sample data for a movies-related agent. This guide assumes you have a running Neo4j instance, either locally or in the cloud.

1. **Populate the database with data.**
   To make this quickstart straightforward, we'll use the built-in Movies dataset available in Neo4j.

1. In your Neo4j Browser, run the following command to create and populate the database:

   ```cypher
   :play movies
   ```

1. Follow the instructions to load the data. This will create a graph with `Movie`, `Person`, and `Actor` nodes and their relationships.

## Step 2: Install and configure Toolbox

In this section, we will install the MCP Toolbox, configure our tools in a `tools.yaml` file, and then run the Toolbox server.

1. **Install the Toolbox binary.**
   The simplest way to get started is to download the latest binary for your operating system.

1. Download the latest version of Toolbox as a binary:

   ```bash
   export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
   curl -O https://storage.googleapis.com/genai-toolbox/v0.16.0/$OS/toolbox
   ```

1. Make the binary executable:

   ```bash
   chmod +x toolbox
   ```

1. **Create the `tools.yaml` file.**
   This file defines your Neo4j source and the specific tools that will be exposed to your AI agent.

   {{< notice tip >}}
   Authentication for the Neo4j source uses standard username and password fields. For production use, it is highly recommended to use environment variables for sensitive information like passwords.
   {{< /notice >}}

   Write the following into a `tools.yaml` file:

   ```yaml
   sources:
     my-neo4j-source:
       kind: neo4j
       uri: bolt://localhost:7687
       user: neo4j
       password: my-password # Replace with your actual password

   tools:
     search-movies-by-actor:
       kind: neo4j-cypher
       source: my-neo4j-source
       description: "Searches for movies an actor has appeared in based on their name. Useful for questions like 'What movies has Tom Hanks been in?'"
       parameters:
         - name: actor_name
           type: string
           description: The full name of the actor to search for.
       statement: |
         MATCH (p:Person {name: $actor_name}) -[:ACTED_IN]-> (m:Movie)
         RETURN m.title AS title, m.year AS year, m.genre AS genre

     get-actor-for-movie:
       kind: neo4j-cypher
       source: my-neo4j-source
       description: "Finds the actors who starred in a specific movie. Useful for questions like 'Who acted in Inception?'"
       parameters:
         - name: movie_title
           type: string
           description: The exact title of the movie.
       statement: |
         MATCH (p:Person) -[:ACTED_IN]-> (m:Movie {title: $movie_title})
         RETURN p.name AS actor
   ```
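
   Once the server is running (see the next step), you can also exercise these tools from Python with the `toolbox-core` client instead of MCP Inspector; a minimal sketch:

   ```python
   import asyncio
   from toolbox_core import ToolboxClient


   async def main():
       async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
           tool = await toolbox.load_tool("search-movies-by-actor")
           # actor_name maps to the $actor_name Cypher parameter above.
           result = await tool(actor_name="Tom Hanks")
           print(result)


   asyncio.run(main())
   ```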

1. **Start the Toolbox server.**
   Run the Toolbox server, pointing to the `tools.yaml` file you created earlier.

   ```bash
   ./toolbox --tools-file "tools.yaml"
   ```

## Step 3: Connect to MCP Inspector

1. Run the MCP Inspector:

   ```bash
   npx @modelcontextprotocol/inspector
   ```

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):

   ```bash
   Starting MCP inspector...
   ⚙️ Proxy server listening on localhost:6277
   🔑 Session token: <YOUR_SESSION_TOKEN>
   Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

   🚀 MCP Inspector is up and running at:
   http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
   ```

1. Open the above link in your browser.

1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.

1. Click `Connect`.

1. Select `List Tools`, you will see a list of tools configured in `tools.yaml`.

1. Test out your tools here!
161 docs/en/samples/snowflake/_index.md Normal file
@@ -0,0 +1,161 @@
---
title: "Snowflake"
type: docs
weight: 2
description: >
  How to get started running Toolbox with MCP Inspector and Snowflake as the source.
---

## Overview

[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
that standardizes how applications provide context to LLMs. Check out this page
on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).

## Before you begin

This guide assumes you have already done the following:

1. [Create a Snowflake account](https://signup.snowflake.com/).
1. Connect to the instance using [SnowSQL](https://docs.snowflake.com/en/user-guide/snowsql), or any other Snowflake client.

## Step 1: Set up your environment

Copy the environment template and update it with your Snowflake credentials:

```bash
cp examples/snowflake-env.sh my-snowflake-env.sh
```

Edit `my-snowflake-env.sh` with your actual Snowflake connection details:

```bash
export SNOWFLAKE_ACCOUNT="your-account-identifier"
export SNOWFLAKE_USER="your-username"
export SNOWFLAKE_PASSWORD="your-password"
export SNOWFLAKE_DATABASE="your-database"
export SNOWFLAKE_SCHEMA="your-schema"
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
export SNOWFLAKE_ROLE="ACCOUNTADMIN"
```

## Step 2: Install Toolbox

In this section, we will download and install the Toolbox binary.

1. Download the latest version of Toolbox as a binary:

   {{< notice tip >}}
   Select the
   [correct binary](https://github.com/googleapis/genai-toolbox/releases)
   corresponding to your OS and CPU architecture.
   {{< /notice >}}
   <!-- {x-release-please-start-version} -->
   ```bash
   export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
   export VERSION="0.10.0"
   curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
   ```
   <!-- {x-release-please-end} -->

1. Make the binary executable:

   ```bash
   chmod +x toolbox
   ```

## Step 3: Configure the tools

You have two options:

#### Option A: Use the prebuilt configuration

```bash
./toolbox --prebuilt snowflake
```

#### Option B: Use the custom configuration

Create a `tools.yaml` file and add the following content. You must replace the placeholders with your actual Snowflake configuration.

```yaml
sources:
  snowflake-source:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE}
    role: ${SNOWFLAKE_ROLE}

tools:
  execute_sql:
    kind: snowflake-execute-sql
    source: snowflake-source
    description: Use this tool to execute SQL.

  list_tables:
    kind: snowflake-sql
    source: snowflake-source
    description: "Lists detailed schema information for user-created tables."
    statement: |
      SELECT table_name, table_type
      FROM information_schema.tables
      WHERE table_schema = current_schema()
      ORDER BY table_name;
```
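
With the server running (see Step 4 below), you can exercise these tools from Python with the `toolbox-core` client; a minimal sketch, mirroring the `runme.py` sample in this directory:

```python
import asyncio
from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        # Run an ad-hoc statement through the execute_sql tool.
        tool = await toolbox.load_tool("execute_sql")
        print(await tool("SELECT 1"))

        # List tables in the current schema; this tool takes no inputs.
        tool = await toolbox.load_tool("list_tables")
        print(await tool())


asyncio.run(main())
```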

For more info on tools, check out the
[Tools](../../resources/tools/) section.

## Step 4: Run the Toolbox server

Run the Toolbox server, pointing to the `tools.yaml` file created earlier:

```bash
./toolbox --tools-file "tools.yaml"
```

## Step 5: Connect to MCP Inspector

1. Run the MCP Inspector:

   ```bash
   npx @modelcontextprotocol/inspector
   ```

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):

   ```bash
   Starting MCP inspector...
   ⚙️ Proxy server listening on localhost:6277
   🔑 Session token: <YOUR_SESSION_TOKEN>
   Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

   🚀 MCP Inspector is up and running at:
   http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
   ```

1. Open the above link in your browser.

1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.

1. Click Connect.

1. Select `List Tools`, you will see a list of tools configured in `tools.yaml`.

1. Test out your tools here!

## What's next

- Learn more about [MCP Inspector](../../how-to/connect_via_mcp.md).
- Learn more about [Toolbox Resources](../../resources/).
- Learn more about [Toolbox How-to guides](../../how-to/).
18 docs/en/samples/snowflake/runme.py Normal file
@@ -0,0 +1,18 @@

import asyncio

from toolbox_core import ToolboxClient


async def main():
    # Replace with the actual URL where your Toolbox service is running
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        # Run an arbitrary statement through the snowflake-execute-sql tool.
        tool = await toolbox.load_tool("execute_sql")
        result = await tool("SELECT 1")
        print(result)

        # Invoke a pre-defined snowflake-sql tool with a named parameter.
        tool = await toolbox.load_tool("list_tables")
        result = await tool(table_names="DIM_DATE")
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
68 docs/en/samples/snowflake/snowflake-config.yaml Normal file
@@ -0,0 +1,68 @@

# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  my-snowflake-db:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE} # Optional, defaults to COMPUTE_WH if not set
    role: ${SNOWFLAKE_ROLE} # Optional, defaults to ACCOUNTADMIN if not set

tools:
  execute_sql:
    kind: snowflake-execute-sql
    source: my-snowflake-db
    description: Execute arbitrary SQL statements on Snowflake

  get_customer_orders:
    kind: snowflake-sql
    source: my-snowflake-db
    description: Get orders for a specific customer
    statement: |
      SELECT o.order_id, o.order_date, o.total_amount, o.status
      FROM orders o
      WHERE o.customer_id = :1
      ORDER BY o.order_date DESC
    parameters:
      - name: customer_id
        type: string
        description: The customer ID to look up orders for

  daily_sales_report:
    kind: snowflake-sql
    source: my-snowflake-db
    description: Generate daily sales report for a specific date
    statement: |
      SELECT
        DATE(order_date) as sales_date,
        COUNT(*) as total_orders,
        SUM(total_amount) as total_revenue,
        AVG(total_amount) as avg_order_value
      FROM orders
      WHERE DATE(order_date) = :1
      GROUP BY DATE(order_date)
    parameters:
      - name: report_date
        type: string
        description: The date to generate report for (YYYY-MM-DD format)

toolsets:
  snowflake-analytics:
    - execute_sql
    - get_customer_orders
    - daily_sales_report
28 docs/en/samples/snowflake/snowflake-env.sh Normal file
@@ -0,0 +1,28 @@

#!/bin/bash

# Snowflake Connection Configuration
# Copy this file to snowflake-env.sh and update with your actual values
# Then source it before running the toolbox: source snowflake-env.sh

# Required environment variables
export SNOWFLAKE_ACCOUNT="your-account-identifier" # e.g., "xy12345.snowflakecomputing.com"
export SNOWFLAKE_USER="your-username"              # Your Snowflake username
export SNOWFLAKE_PASSWORD="your-password"          # Your Snowflake password
export SNOWFLAKE_DATABASE="your-database"          # Database name
export SNOWFLAKE_SCHEMA="your-schema"              # Schema name (usually "PUBLIC")

# Optional environment variables (will use defaults if not set)
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"            # Warehouse name (default: COMPUTE_WH)
export SNOWFLAKE_ROLE="ACCOUNTADMIN"               # Role name (default: ACCOUNTADMIN)

echo "Snowflake environment variables have been set!"
echo "Account: $SNOWFLAKE_ACCOUNT"
echo "User: $SNOWFLAKE_USER"
echo "Database: $SNOWFLAKE_DATABASE"
echo "Schema: $SNOWFLAKE_SCHEMA"
echo "Warehouse: $SNOWFLAKE_WAREHOUSE"
echo "Role: $SNOWFLAKE_ROLE"
echo ""
echo "You can now run the toolbox with:"
echo "  ./toolbox --prebuilt snowflake    # Use prebuilt configuration"
echo "  ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml    # Use custom configuration"
56 docs/en/samples/snowflake/test-snowflake.sh Normal file
@@ -0,0 +1,56 @@

#!/bin/bash

# Test script to demonstrate Snowflake configuration with environment variables
# This script shows how to set up and test the Snowflake toolbox configuration

echo "=== Testing Snowflake Configuration ==="
echo ""

# Set up test environment variables (replace with your actual values)
echo "Setting up test environment variables..."
export SNOWFLAKE_ACCOUNT="test-account"
export SNOWFLAKE_USER="test-user"
export SNOWFLAKE_PASSWORD="test-password"
export SNOWFLAKE_DATABASE="test-database"
export SNOWFLAKE_SCHEMA="test-schema"
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
export SNOWFLAKE_ROLE="ACCOUNTADMIN"

echo "Environment variables set:"
echo "  SNOWFLAKE_ACCOUNT: $SNOWFLAKE_ACCOUNT"
echo "  SNOWFLAKE_USER: $SNOWFLAKE_USER"
echo "  SNOWFLAKE_DATABASE: $SNOWFLAKE_DATABASE"
echo "  SNOWFLAKE_SCHEMA: $SNOWFLAKE_SCHEMA"
echo "  SNOWFLAKE_WAREHOUSE: $SNOWFLAKE_WAREHOUSE"
echo "  SNOWFLAKE_ROLE: $SNOWFLAKE_ROLE"
echo ""

echo "=== Testing Prebuilt Configuration ==="
echo "This will attempt to initialize with the prebuilt Snowflake configuration:"
echo "Command: ./toolbox --prebuilt snowflake --stdio"
echo ""
echo "Expected result: Connection failure due to test credentials (this is normal)"
echo ""

# Test the prebuilt configuration (this will fail with test credentials, which is expected)
timeout 5s ./toolbox --prebuilt snowflake --stdio 2>&1 | head -5

echo ""
echo "=== Testing Custom Configuration ==="
echo "This will attempt to initialize with the custom Snowflake configuration:"
echo "Command: ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio"
echo ""
echo "Expected result: Connection failure due to test credentials (this is normal)"
echo ""

# Test the custom configuration (this will fail with test credentials, which is expected)
timeout 5s ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio 2>&1 | head -5

echo ""
echo "=== Instructions for Real Usage ==="
echo "1. Copy docs/en/samples/snowflake/snowflake-env.sh to your own file"
echo "2. Edit it with your actual Snowflake credentials"
echo "3. Source the file: source your-snowflake-env.sh"
echo "4. Run: ./toolbox --prebuilt snowflake"
echo ""
echo "For more information, see docs/en/samples/snowflake"
@@ -1,6 +1,6 @@
{
  "name": "mcp-toolbox-for-databases",
  "version": "0.24.0",
  "version": "0.25.0",
  "description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
  "contextFileName": "MCP-TOOLBOX-EXTENSION.md"
}
41 go.mod
@@ -37,12 +37,14 @@ require (
	github.com/google/go-cmp v0.7.0
	github.com/google/uuid v1.6.0
	github.com/jackc/pgx/v5 v5.7.6
	github.com/jmoiron/sqlx v1.4.0
	github.com/looker-open-source/sdk-codegen/go v0.25.21
	github.com/microsoft/go-mssqldb v1.9.3
	github.com/nakagami/firebirdsql v0.9.15
	github.com/neo4j/neo4j-go-driver/v5 v5.28.4
	github.com/redis/go-redis/v9 v9.17.2
	github.com/sijms/go-ora/v2 v2.9.0
	github.com/snowflakedb/gosnowflake v1.18.1
	github.com/spf13/cobra v1.10.1
	github.com/thlib/go-timezone-local v0.0.7
	github.com/trinodb/trino-go-client v0.330.0

@@ -88,13 +90,40 @@ require (
	cloud.google.com/go/monitoring v1.24.3 // indirect
	cloud.google.com/go/trace v1.11.7 // indirect
	filippo.io/edwards25519 v1.1.0 // indirect
	github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
	github.com/99designs/keyring v1.2.2 // indirect
	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
	github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
	github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect
	github.com/BurntSushi/toml v1.4.0 // indirect
	github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
	github.com/PuerkitoBio/goquery v1.10.3 // indirect
	github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
	github.com/ajg/form v1.5.1 // indirect
	github.com/apache/arrow-go/v18 v18.4.0 // indirect
	github.com/apache/arrow/go/v15 v15.0.2 // indirect
	github.com/apache/thrift v0.22.0 // indirect
	github.com/aws/aws-sdk-go-v2 v1.39.0 // indirect
	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1 // indirect
	github.com/aws/aws-sdk-go-v2/config v1.31.8 // indirect
	github.com/aws/aws-sdk-go-v2/credentials v1.18.12 // indirect
	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 // indirect
	github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 // indirect
	github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 // indirect
	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 // indirect
	github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
	github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/s3 v1.88.1 // indirect
	github.com/aws/aws-sdk-go-v2/service/sso v1.29.3 // indirect
	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect
	github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect
	github.com/aws/smithy-go v1.23.0 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
	github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
	github.com/couchbase/gocbcore/v10 v10.8.1 // indirect

@@ -102,8 +131,10 @@ require (
	github.com/couchbase/goprotostellar v1.0.2 // indirect
	github.com/couchbase/tools-common/errors v1.0.0 // indirect
	github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect
	github.com/danieljoos/wincred v1.2.2 // indirect
	github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
	github.com/dustin/go-humanize v1.0.1 // indirect
	github.com/dvsekhvalnov/jose2go v1.7.0 // indirect
	github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
	github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
	github.com/felixge/httpsnoop v1.0.4 // indirect

@@ -115,7 +146,9 @@ require (
	github.com/go-playground/locales v0.14.1 // indirect
	github.com/go-playground/universal-translator v0.18.1 // indirect
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
	github.com/godror/knownpb v0.3.0 // indirect
	github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
	github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
	github.com/golang-sql/sqlexp v0.1.0 // indirect
	github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect

@@ -127,6 +160,7 @@ require (
	github.com/gorilla/websocket v1.5.3 // indirect
	github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
	github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
	github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
	github.com/hashicorp/go-uuid v1.0.3 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jackc/pgpassfile v1.0.0 // indirect

@@ -140,19 +174,25 @@ require (
	github.com/jcmturner/rpc/v2 v2.0.3 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect
	github.com/klauspost/asmfmt v1.3.2 // indirect
	github.com/klauspost/compress v1.18.0 // indirect
	github.com/klauspost/cpuid/v2 v2.2.11 // indirect
	github.com/leodido/go-urn v1.4.0 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
	github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/montanaflynn/stats v0.7.1 // indirect
	github.com/mtibben/percent v0.2.1 // indirect
	github.com/nakagami/chacha20 v0.1.0 // indirect
	github.com/ncruces/go-strftime v0.1.9 // indirect
	github.com/pierrec/lz4 v2.6.1+incompatible // indirect
	github.com/pierrec/lz4/v4 v4.1.22 // indirect
	github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
	github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
	github.com/sirupsen/logrus v1.9.3 // indirect
	github.com/spf13/pflag v1.0.9 // indirect
	github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
	github.com/xdg-go/pbkdf2 v1.0.0 // indirect

@@ -182,6 +222,7 @@ require (
	golang.org/x/sync v0.18.0 // indirect
	golang.org/x/sys v0.38.0 // indirect
	golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect
	golang.org/x/term v0.37.0 // indirect
	golang.org/x/text v0.31.0 // indirect
	golang.org/x/time v0.14.0 // indirect
	golang.org/x/tools v0.38.0 // indirect
40
go.sum
40
go.sum
@@ -643,6 +643,10 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
|
||||
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
|
||||
github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
|
||||
github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
@@ -653,11 +657,15 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZb
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 h1:u/LLAOFgsMv7HmNL4Qufg58y+qElGOt5qv0z1mURkRY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0/go.mod h1:2e8rMJtl2+2j+HXbTBwnyGpm5Nou7KhvSfxOq8JpTag=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
|
||||
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/ClickHouse/ch-go v0.68.0 h1:zd2VD8l2aVYnXFRyhTyKCrxvhSz1AaY4wBUXu/f0GiU=
|
||||
github.com/ClickHouse/ch-go v0.68.0/go.mod h1:C89Fsm7oyck9hr6rRo5gqqiVtaIY6AjdD0WFMyNRQ5s=
|
||||
@@ -701,6 +709,8 @@ github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUS
|
||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0=
|
||||
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14=
|
||||
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
|
||||
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
|
||||
github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE=
|
||||
@@ -708,6 +718,8 @@ github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+ye
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0 h1:Omnzb1Z/P90Dr2TbVNu54ICQL7TKVIIsJO231w484HU=
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0/go.mod h1:QH/asJjB3mHvY6Dot6ZKMMpTcOrWJ8i9GhsvG1g0PK4=
|
||||
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ=
|
||||
github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk=
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.0 h1:xm5WV/2L4emMRmMjHFykqiA4M/ra0DJVSWUkDyBjbg4=
|
||||
@@ -720,6 +732,8 @@ github.com/aws/aws-sdk-go-v2/credentials v1.18.12 h1:zmc9e1q90wMn8wQbjryy8IwA6Q4
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.12/go.mod h1:3VzdRDR5u3sSJRI4kYcOSIBbeYsgtVk7dG5R/U6qLWY=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 h1:Is2tPmieqGS2edBnmOJIbdvOA6Op+rRpaYR60iBAwXM=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7/go.mod h1:F1i5V5421EGci570yABvpIXgRIBPb5JM+lSkHF6Dq5w=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 h1:7Zwtt/lP3KNRkeZre7soMELMGNoBrutx8nobg1jKWmo=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15/go.mod h1:436h2adoHb57yd+8W+gYPrrA9U/R/SuAuOO42Ushzhw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 h1:UCxq0X9O3xrlENdKf1r9eRJoKz/b0AfGkpp3a7FPlhg=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7/go.mod h1:rHRoJUNUASj5Z/0eqI4w32vKvC7atoWR0jC+IkmVH8k=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 h1:Y6DTZUn7ZUC4th9FMBbo8LVE+1fyq3ofw+tRwkUd3PY=
|
||||
@@ -804,6 +818,8 @@ github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
|
||||
github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
@@ -822,6 +838,8 @@ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs=
|
||||
@@ -905,6 +923,7 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
|
||||
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
@@ -915,6 +934,8 @@ github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
|
||||
github.com/godror/godror v0.49.6 h1:ts4ZGw8uLJ42e1D7aXmVuSrld0/lzUzmIUjuUuQOgGM=
|
||||
github.com/godror/godror v0.49.6/go.mod h1:kTMcxZzRw73RT5kn9v3JkBK4kHI6dqowHotqV72ebU8=
|
||||
github.com/godror/knownpb v0.3.0 h1:+caUdy8hTtl7X05aPl3tdL540TvCcaQA6woZQroLZMw=
|
||||
@@ -1066,6 +1087,8 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4Zs
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0=
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
@@ -1110,6 +1133,8 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@@ -1121,6 +1146,7 @@ github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALr
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
@@ -1144,6 +1170,8 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
@@ -1156,9 +1184,13 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/microsoft/go-mssqldb v1.9.3 h1:hy4p+LDC8LIGvI3JATnLVmBOLMJbmn5X400mr5j0lPs=
github.com/microsoft/go-mssqldb v1.9.3/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
@@ -1174,6 +1206,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo=
github.com/nakagami/chacha20 v0.1.0/go.mod h1:xpoujepNFA7MvYLvX5xKHzlOHimDrLI9Ll8zfOJ0l2E=
github.com/nakagami/firebirdsql v0.9.15 h1:Mf05jaFI8+kjy6sBstsAu76zOkJ44AGd6cpApWNrp/0=
@@ -1182,6 +1216,7 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/ulid/v2 v2.0.2 h1:r4fFzBm+bv0wNKNh5eXTwU7i85y5x+uwkxCUTNVQqLc=
github.com/oklog/ulid/v2 v2.0.2/go.mod h1:mtBL0Qe/0HAx6/a4Z30qxVIAL1eQDweXq5lxOEiwQ68=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
@@ -1247,6 +1282,8 @@ github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYo
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/snowflakedb/gosnowflake v1.18.1 h1:Nb4AWSnSBWe1UKpKTwCZxjYhYo1JH7GgKhO3wW1kR10=
github.com/snowflakedb/gosnowflake v1.18.1/go.mod h1:7D4+cLepOWrerVsH+tevW3zdMJ5/WrEN7ZceAC6xBv0=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
@@ -1650,10 +1687,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -2075,6 +2114,7 @@ google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aO
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
@@ -49,6 +49,7 @@ var expectedToolSources = []string{
	"postgres",
	"serverless-spark",
	"singlestore",
	"snowflake",
	"spanner-postgres",
	"spanner",
	"sqlite",
@@ -127,9 +128,9 @@ func TestGetPrebuiltTool(t *testing.T) {
	sqlite_config, _ := Get("sqlite")
	neo4jconfig, _ := Get("neo4j")
	healthcare_config, _ := Get("cloud-healthcare")

	snowflake_config, _ := Get("snowflake")
	if len(alloydb_admin_config) <= 0 {
-		t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
+		t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
	}
	if len(alloydb_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
@@ -197,6 +198,9 @@ func TestGetPrebuiltTool(t *testing.T) {
	if len(singlestore_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch singlestore prebuilt tools yaml")
	}
	if len(snowflake_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
	}
	if len(spanner_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch spanner prebuilt tools yaml")
	}
@@ -218,6 +222,9 @@ func TestGetPrebuiltTool(t *testing.T) {
	if len(healthcare_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch healthcare prebuilt tools yaml")
	}
	if len(snowflake_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
	}
}

func TestFailGetPrebuiltTool(t *testing.T) {

@@ -19,6 +19,7 @@ sources:
    location: ${BIGQUERY_LOCATION:}
    useClientOAuth: ${BIGQUERY_USE_CLIENT_OAUTH:false}
    scopes: ${BIGQUERY_SCOPES:}
    maxQueryResultRows: ${BIGQUERY_MAX_QUERY_RESULT_ROWS:50}

tools:
  analyze_contribution:

@@ -43,6 +43,12 @@ tools:
  clone_instance:
    kind: cloud-sql-clone-instance
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
  restore_backup:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mssql_admin_tools:
@@ -54,3 +60,5 @@ toolsets:
    - create_user
    - wait_for_operation
    - clone_instance
    - create_backup
    - restore_backup

@@ -43,6 +43,12 @@ tools:
  clone_instance:
    kind: cloud-sql-clone-instance
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
  restore_backup:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mysql_admin_tools:
@@ -54,3 +60,5 @@ toolsets:
    - create_user
    - wait_for_operation
    - clone_instance
    - create_backup
    - restore_backup

@@ -46,6 +46,12 @@ tools:
  postgres_upgrade_precheck:
    kind: postgres-upgrade-precheck
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
  restore_backup:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_postgres_admin_tools:
@@ -58,3 +64,5 @@ toolsets:
    - wait_for_operation
    - postgres_upgrade_precheck
    - clone_instance
    - create_backup
    - restore_backup

internal/prebuiltconfigs/tools/snowflake.yaml (new file, 142 lines)
@@ -0,0 +1,142 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  snowflake-source:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE}
    role: ${SNOWFLAKE_ROLE}

tools:
  execute_sql:
    kind: snowflake-execute-sql
    source: snowflake-source
    description: Use this tool to execute SQL.

  list_tables:
    kind: snowflake-sql
    source: snowflake-source
    description: "Lists detailed schema information (object type, columns, constraints, indexes, owner, comment) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in the specified database and schema."
    statement: |
      WITH
      input_param AS (
        SELECT ? AS param -- Single bind variable here
      )
      ,
      all_tables_mode AS (
        SELECT COALESCE(TRIM(param), '') = '' AS is_all_tables
        FROM input_param
      ) --SELECT * FROM all_tables_mode;
      ,
      filtered_table_names AS (
        SELECT DISTINCT TRIM(LOWER(value)) AS table_name
        FROM input_param, all_tables_mode, TABLE(SPLIT_TO_TABLE(param, ','))
        WHERE NOT is_all_tables
      ) -- SELECT * FROM filtered_table_names;
      ,
      table_info AS (
        SELECT
          t.TABLE_CATALOG,
          t.TABLE_SCHEMA,
          t.TABLE_NAME,
          t.TABLE_TYPE,
          t.TABLE_OWNER,
          t.COMMENT
        FROM
          all_tables_mode
          CROSS JOIN ${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLES t
        WHERE
          t.TABLE_TYPE = 'BASE TABLE'
          AND t.TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA')
          AND t.TABLE_SCHEMA = '${SNOWFLAKE_SCHEMA}'
          AND (is_all_tables OR LOWER(t.TABLE_NAME) IN (SELECT table_name FROM filtered_table_names))
      ) -- SELECT * FROM table_info;
      ,
      columns_info AS (
        SELECT
          c.TABLE_CATALOG AS database_name,
          c.TABLE_SCHEMA AS schema_name,
          c.TABLE_NAME AS table_name,
          c.COLUMN_NAME AS column_name,
          c.DATA_TYPE AS data_type,
          c.ORDINAL_POSITION AS column_ordinal_position,
          c.IS_NULLABLE AS is_nullable,
          c.COLUMN_DEFAULT AS column_default,
          c.COMMENT AS column_comment
        FROM
          ${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.COLUMNS c
          INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
      )
      ,
      constraints_info AS (
        SELECT
          tc.TABLE_CATALOG AS database_name,
          tc.TABLE_SCHEMA AS schema_name,
          tc.TABLE_NAME AS table_name,
          tc.CONSTRAINT_NAME AS constraint_name,
          tc.CONSTRAINT_TYPE AS constraint_type
        FROM
          ${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
          INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
        GROUP BY
          tc.TABLE_CATALOG, tc.TABLE_SCHEMA, tc.TABLE_NAME, tc.CONSTRAINT_NAME, tc.CONSTRAINT_TYPE
      )
      SELECT
        ti.TABLE_SCHEMA AS schema_name,
        ti.TABLE_NAME AS object_name,
        OBJECT_CONSTRUCT(
          'schema_name', ti.TABLE_SCHEMA,
          'object_name', ti.TABLE_NAME,
          'object_type', ti.TABLE_TYPE,
          'owner', ti.TABLE_OWNER,
          'comment', ti.COMMENT,
          'columns', COALESCE(
            (SELECT ARRAY_AGG(
              OBJECT_CONSTRUCT(
                'column_name', ci.column_name,
                'data_type', ci.data_type,
                'ordinal_position', ci.column_ordinal_position,
                'is_nullable', ci.is_nullable,
                'column_default', ci.column_default,
                'column_comment', ci.column_comment
              )
            ) FROM columns_info ci WHERE ci.table_name = ti.TABLE_NAME AND ci.schema_name = ti.TABLE_SCHEMA),
            ARRAY_CONSTRUCT()
          ),
          'constraints', COALESCE(
            (SELECT ARRAY_AGG(
              OBJECT_CONSTRUCT(
                'constraint_name', cons.constraint_name,
                'constraint_type', cons.constraint_type
              )
            ) FROM constraints_info cons WHERE cons.table_name = ti.TABLE_NAME AND cons.schema_name = ti.TABLE_SCHEMA),
            ARRAY_CONSTRUCT()
          )
        ) AS object_details
      FROM table_info ti
      ORDER BY ti.TABLE_SCHEMA, ti.TABLE_NAME;
    parameters:
      - name: table_names
        type: string
        description: "Optional: A comma-separated list of table names. If empty, details for all tables in the specified database and schema will be listed."

toolsets:
  snowflake_tools:
    - execute_sql
    - list_tables
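
A usage sketch (not part of this diff): prebuilt configs like the one above are embedded in the binary and fetched by name, the same way the prebuiltconfigs tests earlier in this change do. The test usage suggests Get returns the raw YAML bytes plus a second value (assumed here to be an error):

package main

import (
	"fmt"

	"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
)

func main() {
	// Fetch the embedded snowflake tools YAML. The SNOWFLAKE_* variables
	// referenced in its source block must be set in the environment before
	// the server loads this config.
	cfg, err := prebuiltconfigs.Get("snowflake")
	if err != nil {
		panic(err)
	}
	fmt.Printf("prebuilt snowflake config: %d bytes\n", len(cfg))
}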
@@ -68,6 +68,8 @@ type ServerConfig struct {
	UI bool
	// Specifies a list of origins permitted to access this server.
	AllowedOrigins []string
	// Specifies a list of hosts permitted to access this server.
	AllowedHosts []string
}

type logFormat string

@@ -27,19 +27,21 @@ import (
	v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
	v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
	v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
	v20251125 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20251125"
	"github.com/googleapis/genai-toolbox/internal/server/resources"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
// Update the version used in InitializeResponse when this value is updated.
-const LATEST_PROTOCOL_VERSION = v20250618.PROTOCOL_VERSION
+const LATEST_PROTOCOL_VERSION = v20251125.PROTOCOL_VERSION

// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
var SUPPORTED_PROTOCOL_VERSIONS = []string{
	v20241105.PROTOCOL_VERSION,
	v20250326.PROTOCOL_VERSION,
	v20250618.PROTOCOL_VERSION,
	v20251125.PROTOCOL_VERSION,
}
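
// Illustrative sketch (not part of this diff): how an initialize request
// could be matched against the list above, echoing the client's requested
// version when the server supports it and otherwise answering with the
// latest version the server speaks. The helper name is hypothetical.
func negotiateProtocolVersion(requested string) string {
	for _, v := range SUPPORTED_PROTOCOL_VERSIONS {
		if v == requested {
			return requested
		}
	}
	return LATEST_PROTOCOL_VERSION
}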

// InitializeResponse runs capability negotiation and protocol version agreement.
@@ -102,6 +104,8 @@ func NotificationHandler(ctx context.Context, body []byte) error {
// This is the Operation phase of the lifecycle for MCP client-server connections.
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
	switch mcpVersion {
	case v20251125.PROTOCOL_VERSION:
		return v20251125.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
	case v20250618.PROTOCOL_VERSION:
		return v20250618.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
	case v20250326.PROTOCOL_VERSION:

internal/server/mcp/v20251125/method.go (new file, 326 lines)
@@ -0,0 +1,326 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v20251125

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"net/http"
	"strings"

	"github.com/googleapis/genai-toolbox/internal/prompts"
	"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
	"github.com/googleapis/genai-toolbox/internal/server/resources"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/util"
)

// ProcessMethod returns a response for the request.
func ProcessMethod(ctx context.Context, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
	switch method {
	case PING:
		return pingHandler(id)
	case TOOLS_LIST:
		return toolsListHandler(id, toolset, body)
	case TOOLS_CALL:
		return toolsCallHandler(ctx, id, resourceMgr, body, header)
	case PROMPTS_LIST:
		return promptsListHandler(ctx, id, promptset, body)
	case PROMPTS_GET:
		return promptsGetHandler(ctx, id, resourceMgr, body)
	default:
		err := fmt.Errorf("invalid method %s", method)
		return jsonrpc.NewError(id, jsonrpc.METHOD_NOT_FOUND, err.Error(), nil), err
	}
}

// pingHandler handles the "ping" method by returning an empty response.
func pingHandler(id jsonrpc.RequestId) (any, error) {
	return jsonrpc.JSONRPCResponse{
		Jsonrpc: jsonrpc.JSONRPC_VERSION,
		Id:      id,
		Result:  struct{}{},
	}, nil
}

func toolsListHandler(id jsonrpc.RequestId, toolset tools.Toolset, body []byte) (any, error) {
	var req ListToolsRequest
	if err := json.Unmarshal(body, &req); err != nil {
		err = fmt.Errorf("invalid mcp tools list request: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
	}

	result := ListToolsResult{
		Tools: toolset.McpManifest,
	}
	return jsonrpc.JSONRPCResponse{
		Jsonrpc: jsonrpc.JSONRPC_VERSION,
		Id:      id,
		Result:  result,
	}, nil
}

// toolsCallHandler generates a response for a tools call.
func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
	authServices := resourceMgr.GetAuthServiceMap()

	// retrieve logger from context
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}

	var req CallToolRequest
	if err = json.Unmarshal(body, &req); err != nil {
		err = fmt.Errorf("invalid mcp tools call request: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
	}

	toolName := req.Params.Name
	toolArgument := req.Params.Arguments
	logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
	tool, ok := resourceMgr.GetTool(toolName)
	if !ok {
		err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
		return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
	}

	// Get access token
	authTokenHeadername, err := tool.GetAuthTokenHeaderName(resourceMgr)
	if err != nil {
		errMsg := fmt.Errorf("error during invocation: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
	}
	accessToken := tools.AccessToken(header.Get(authTokenHeadername))

	// Check if this specific tool requires the standard authorization header
	clientAuth, err := tool.RequiresClientAuthorization(resourceMgr)
	if err != nil {
		errMsg := fmt.Errorf("error during invocation: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
	}
	if clientAuth {
		if accessToken == "" {
			return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
		}
	}

	// Marshal the arguments and decode them with DecodeJSON to avoid precision loss between floats and ints.
	aMarshal, err := json.Marshal(toolArgument)
	if err != nil {
		err = fmt.Errorf("unable to marshal tools argument: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}

	var data map[string]any
	if err = util.DecodeJSON(bytes.NewBuffer(aMarshal), &data); err != nil {
		err = fmt.Errorf("unable to decode tools argument: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}

	// Tool authentication
	// claimsFromAuth maps the name of the authservice to the claims retrieved from it.
	claimsFromAuth := make(map[string]map[string]any)

	// if using stdio, header will be nil and auth will not be supported
	if header != nil {
		for _, aS := range authServices {
			claims, err := aS.GetClaimsFromHeader(ctx, header)
			if err != nil {
				logger.DebugContext(ctx, err.Error())
				continue
			}
			if claims == nil {
				// authService not present in header
				continue
			}
			claimsFromAuth[aS.GetName()] = claims
		}
	}

	// Tool authorization check
	verifiedAuthServices := make([]string, len(claimsFromAuth))
	i := 0
	for k := range claimsFromAuth {
		verifiedAuthServices[i] = k
		i++
	}

	// Check if any of the specified auth services is verified
	isAuthorized := tool.Authorized(verifiedAuthServices)
	if !isAuthorized {
		err = fmt.Errorf("unauthorized Tool call: Please make sure you specify the correct auth headers: %w", util.ErrUnauthorized)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
	}
	logger.DebugContext(ctx, "tool invocation authorized")

	params, err := tool.ParseParams(data, claimsFromAuth)
	if err != nil {
		err = fmt.Errorf("provided parameters were invalid: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
	}
	logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))

	// run tool invocation and generate response.
	results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
	if err != nil {
		errStr := err.Error()
		// Missing authService tokens.
		if errors.Is(err, util.ErrUnauthorized) {
			return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
		}
		// Upstream auth error
		if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
			if clientAuth {
				// Errors with client credentials should pass down to the client
				return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
			}
			// Auth errors with ADC should raise an internal 500 error
			return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
		}
		text := TextContent{
			Type: "text",
			Text: err.Error(),
		}
		return jsonrpc.JSONRPCResponse{
			Jsonrpc: jsonrpc.JSONRPC_VERSION,
			Id:      id,
			Result:  CallToolResult{Content: []TextContent{text}, IsError: true},
		}, nil
	}

	content := make([]TextContent, 0)

	sliceRes, ok := results.([]any)
	if !ok {
		sliceRes = []any{results}
	}

	for _, d := range sliceRes {
		text := TextContent{Type: "text"}
		dM, err := json.Marshal(d)
		if err != nil {
			text.Text = fmt.Sprintf("failed to marshal: %s, result: %s", err, d)
		} else {
			text.Text = string(dM)
		}
		content = append(content, text)
	}

	return jsonrpc.JSONRPCResponse{
		Jsonrpc: jsonrpc.JSONRPC_VERSION,
		Id:      id,
		Result:  CallToolResult{Content: content},
	}, nil
}

// promptsListHandler handles the "prompts/list" method.
func promptsListHandler(ctx context.Context, id jsonrpc.RequestId, promptset prompts.Promptset, body []byte) (any, error) {
	// retrieve logger from context
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}
	logger.DebugContext(ctx, "handling prompts/list request")

	var req ListPromptsRequest
	if err := json.Unmarshal(body, &req); err != nil {
		err = fmt.Errorf("invalid mcp prompts list request: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
	}

	result := ListPromptsResult{
		Prompts: promptset.McpManifest,
	}
	logger.DebugContext(ctx, fmt.Sprintf("returning %d prompts", len(promptset.McpManifest)))
	return jsonrpc.JSONRPCResponse{
		Jsonrpc: jsonrpc.JSONRPC_VERSION,
		Id:      id,
		Result:  result,
	}, nil
}

// promptsGetHandler handles the "prompts/get" method.
func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte) (any, error) {
	// retrieve logger from context
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}
	logger.DebugContext(ctx, "handling prompts/get request")

	var req GetPromptRequest
	if err := json.Unmarshal(body, &req); err != nil {
		err = fmt.Errorf("invalid mcp prompts/get request: %w", err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
	}

	promptName := req.Params.Name
	logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
	prompt, ok := resourceMgr.GetPrompt(promptName)
	if !ok {
		err := fmt.Errorf("prompt with name %q does not exist", promptName)
		return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
	}

	// Parse the arguments provided in the request.
	argValues, err := prompt.ParseArgs(req.Params.Arguments, nil)
	if err != nil {
		err = fmt.Errorf("invalid arguments for prompt %q: %w", promptName, err)
		return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
	}
	logger.DebugContext(ctx, fmt.Sprintf("parsed args: %v", argValues))

	// Substitute the argument values into the prompt's messages.
	substituted, err := prompt.SubstituteParams(argValues)
	if err != nil {
		err = fmt.Errorf("error substituting params for prompt %q: %w", promptName, err)
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}

	// Cast the result to the expected []prompts.Message type.
	substitutedMessages, ok := substituted.([]prompts.Message)
	if !ok {
		err = fmt.Errorf("internal error: SubstituteParams returned unexpected type")
		return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}
	logger.DebugContext(ctx, "substituted params successfully")

	// Format the response messages into the required structure.
	promptMessages := make([]PromptMessage, len(substitutedMessages))
	for i, msg := range substitutedMessages {
		promptMessages[i] = PromptMessage{
			Role: msg.Role,
			Content: TextContent{
				Type: "text",
				Text: msg.Content,
			},
		}
	}

	result := GetPromptResult{
		Description: prompt.Manifest().Description,
		Messages:    promptMessages,
	}

	return jsonrpc.JSONRPCResponse{
		Jsonrpc: jsonrpc.JSONRPC_VERSION,
		Id:      id,
		Result:  result,
	}, nil
}
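
A standalone sketch of the float/int issue the decode step in toolsCallHandler guards against: unmarshaling into `any` turns every JSON number into float64, while a decoder with UseNumber keeps the digits intact (util.DecodeJSON is assumed to take the latter approach).

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"id": 9007199254740993}`) // 2^53+1: not representable as float64

	var lossy map[string]any
	_ = json.Unmarshal(raw, &lossy) // the number becomes a rounded float64

	dec := json.NewDecoder(bytes.NewReader(raw))
	dec.UseNumber() // keep numbers as json.Number strings
	var exact map[string]any
	_ = dec.Decode(&exact)

	fmt.Println(lossy["id"]) // 9.007199254740992e+15 (precision lost)
	fmt.Println(exact["id"]) // 9007199254740993
}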
internal/server/mcp/v20251125/types.go (new file, 219 lines)
@@ -0,0 +1,219 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v20251125

import (
	"github.com/googleapis/genai-toolbox/internal/prompts"
	"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

// SERVER_NAME is the server name used in Implementation.
const SERVER_NAME = "Toolbox"

// PROTOCOL_VERSION is the version of the MCP protocol in this package.
const PROTOCOL_VERSION = "2025-11-25"

// methods that are supported.
const (
	PING         = "ping"
	TOOLS_LIST   = "tools/list"
	TOOLS_CALL   = "tools/call"
	PROMPTS_LIST = "prompts/list"
	PROMPTS_GET  = "prompts/get"
)

/* Empty result */

// EmptyResult represents a response that indicates success but carries no data.
type EmptyResult jsonrpc.Result

/* Pagination */

// Cursor is an opaque token used to represent a cursor for pagination.
type Cursor string

type PaginatedRequest struct {
	jsonrpc.Request
	Params struct {
		// An opaque token representing the current pagination position.
		// If provided, the server should return results starting after this cursor.
		Cursor Cursor `json:"cursor,omitempty"`
	} `json:"params,omitempty"`
}

type PaginatedResult struct {
	jsonrpc.Result
	// An opaque token representing the pagination position after the last returned result.
	// If present, there may be more results available.
	NextCursor Cursor `json:"nextCursor,omitempty"`
}
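
// Illustrative wire shape (not from this change) of a paginated list
// exchange using the two structs above:
//
//	request:  {"jsonrpc": "2.0", "id": 1, "method": "tools/list",
//	           "params": {"cursor": "abc123"}}
//	response: {"jsonrpc": "2.0", "id": 1,
//	           "result": {"tools": [...], "nextCursor": "def456"}}
//
// An absent nextCursor in the result means the listing is complete.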

/* Tools */

// Sent from the client to request a list of tools the server has.
type ListToolsRequest struct {
	PaginatedRequest
}

// The server's response to a tools/list request from the client.
type ListToolsResult struct {
	PaginatedResult
	Tools []tools.McpManifest `json:"tools"`
}

// Used by the client to invoke a tool provided by the server.
type CallToolRequest struct {
	jsonrpc.Request
	Params struct {
		Name      string         `json:"name"`
		Arguments map[string]any `json:"arguments,omitempty"`
	} `json:"params,omitempty"`
}

// The sender or recipient of messages and data in a conversation.
type Role string

const (
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
)

// Base for objects that include optional annotations for the client.
// The client can use annotations to inform how objects are used or displayed.
type Annotated struct {
	Annotations *struct {
		// Describes who the intended customer of this object or data is.
		// It can include multiple entries to indicate content useful for multiple
		// audiences (e.g., `["user", "assistant"]`).
		Audience []Role `json:"audience,omitempty"`
		// Describes how important this data is for operating the server.
		//
		// A value of 1 means "most important," and indicates that the data is
		// effectively required, while 0 means "least important," and indicates that
		// the data is entirely optional.
		//
		// @TJS-type number
		// @minimum 0
		// @maximum 1
		Priority float64 `json:"priority,omitempty"`
	} `json:"annotations,omitempty"`
}

// TextContent represents text provided to or from an LLM.
type TextContent struct {
	Annotated
	Type string `json:"type"`
	// The text content of the message.
	Text string `json:"text"`
}

// The server's response to a tool call.
//
// Any errors that originate from the tool SHOULD be reported inside the result
// object, with `isError` set to true, _not_ as an MCP protocol-level error
// response. Otherwise, the LLM would not be able to see that an error occurred
// and self-correct.
//
// However, any errors in _finding_ the tool, an error indicating that the
// server does not support tool calls, or any other exceptional conditions,
// should be reported as an MCP error response.
type CallToolResult struct {
	jsonrpc.Result
	// Could be a TextContent, ImageContent, or EmbeddedResource.
	// For Toolbox, we will only be sending TextContent.
	Content []TextContent `json:"content"`
	// Whether the tool call ended in an error.
	// If not set, this is assumed to be false (the call was successful).
	//
	// Any errors that originate from the tool SHOULD be reported inside the result
	// object, with `isError` set to true, _not_ as an MCP protocol-level error
	// response. Otherwise, the LLM would not be able to see that an error occurred
	// and self-correct.
	//
	// However, any errors in _finding_ the tool, an error indicating that the
	// server does not support tool calls, or any other exceptional conditions,
	// should be reported as an MCP error response.
	IsError bool `json:"isError,omitempty"`
	// An optional JSON object that represents the structured result of the tool call.
	StructuredContent map[string]any `json:"structuredContent,omitempty"`
}

// Additional properties describing a Tool to clients.
//
// NOTE: all properties in ToolAnnotations are **hints**.
// They are not guaranteed to provide a faithful description of
// tool behavior (including descriptive properties like `title`).
//
// Clients should never make tool use decisions based on ToolAnnotations
// received from untrusted servers.
type ToolAnnotations struct {
	// A human-readable title for the tool.
	Title string `json:"title,omitempty"`
	// If true, the tool does not modify its environment.
	// Default: false
	ReadOnlyHint bool `json:"readOnlyHint,omitempty"`
	// If true, the tool may perform destructive updates to its environment.
	// If false, the tool performs only additive updates.
	// (This property is meaningful only when `readOnlyHint == false`)
	// Default: true
	DestructiveHint bool `json:"destructiveHint,omitempty"`
	// If true, calling the tool repeatedly with the same arguments
	// will have no additional effect on its environment.
	// (This property is meaningful only when `readOnlyHint == false`)
	// Default: false
	IdempotentHint bool `json:"idempotentHint,omitempty"`
	// If true, this tool may interact with an "open world" of external
	// entities. If false, the tool's domain of interaction is closed.
	// For example, the world of a web search tool is open, whereas that
	// of a memory tool is not.
	// Default: true
	OpenWorldHint bool `json:"openWorldHint,omitempty"`
}

/* Prompts */

// Sent from the client to request a list of prompts the server has.
type ListPromptsRequest struct {
	PaginatedRequest
}

// The server's response to a prompts/list request from the client.
type ListPromptsResult struct {
	PaginatedResult
	Prompts []prompts.McpManifest `json:"prompts"`
}

// Used by the client to get a prompt provided by the server.
type GetPromptRequest struct {
	jsonrpc.Request
	Params struct {
		Name      string         `json:"name"`
		Arguments map[string]any `json:"arguments,omitempty"`
	} `json:"params"`
}

// The server's response to a prompts/get request from the client.
type GetPromptResult struct {
	jsonrpc.Result
	Description string          `json:"description,omitempty"`
	Messages    []PromptMessage `json:"messages"`
}

// Describes a message returned as part of a prompt.
type PromptMessage struct {
	Role    string      `json:"role"`
	Content TextContent `json:"content"`
}
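
A minimal sketch of the isError convention described above, using trimmed stand-ins for the real structs (which also embed jsonrpc.Result and Annotated):

package main

import (
	"encoding/json"
	"fmt"
)

type textContent struct {
	Type string `json:"type"`
	Text string `json:"text"`
}

type callToolResult struct {
	Content []textContent `json:"content"`
	IsError bool          `json:"isError,omitempty"`
}

func main() {
	// A tool failure travels inside the result, not as a JSON-RPC error,
	// so the model can read the message and self-correct.
	res := callToolResult{
		Content: []textContent{{Type: "text", Text: "table not found: orders"}},
		IsError: true,
	}
	out, _ := json.Marshal(res)
	fmt.Println(string(out))
	// {"content":[{"type":"text","text":"table not found: orders"}],"isError":true}
}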
@@ -37,6 +37,7 @@ const jsonrpcVersion = "2.0"
const protocolVersion20241105 = "2024-11-05"
const protocolVersion20250326 = "2025-03-26"
const protocolVersion20250618 = "2025-06-18"
const protocolVersion20251125 = "2025-11-25"
const serverName = "Toolbox"

var basicInputSchema = map[string]any{
@@ -485,6 +486,23 @@ func TestMcpEndpoint(t *testing.T) {
			},
		},
	},
	{
		name:     "version 2025-11-25",
		protocol: protocolVersion20251125,
		idHeader: false,
		initWant: map[string]any{
			"jsonrpc": "2.0",
			"id":      "mcp-initialize",
			"result": map[string]any{
				"protocolVersion": "2025-11-25",
				"capabilities": map[string]any{
					"tools":   map[string]any{"listChanged": false},
					"prompts": map[string]any{"listChanged": false},
				},
				"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
			},
		},
	},
}
for _, vtc := range versTestCases {
	t.Run(vtc.name, func(t *testing.T) {
@@ -494,8 +512,7 @@ func TestMcpEndpoint(t *testing.T) {
		if sessionId != "" {
			header["Mcp-Session-Id"] = sessionId
		}

-		if vtc.protocol == protocolVersion20250618 {
+		if vtc.protocol != protocolVersion20241105 && vtc.protocol != protocolVersion20250326 {
			header["MCP-Protocol-Version"] = vtc.protocol
		}

@@ -300,6 +300,25 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
	return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
}

func hostCheck(allowedHosts map[string]struct{}) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			_, hasWildcard := allowedHosts["*"]
			hostname := r.Host
			if host, _, err := net.SplitHostPort(r.Host); err == nil {
				hostname = host
			}
			_, hostIsAllowed := allowedHosts[hostname]
			if !hasWildcard && !hostIsAllowed {
				// Return 403 Forbidden to block the attack.
				http.Error(w, "Invalid Host header", http.StatusForbidden)
				return
			}
			next.ServeHTTP(w, r)
		})
	}
}
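
// Illustrative httptest sketch (not part of this diff) of the middleware
// above: with an allowlist of {"localhost"}, a request carrying an
// unexpected Host header is rejected with 403, which is what defeats a DNS
// rebinding attempt.
//
//	allowed := map[string]struct{}{"localhost": {}}
//	h := hostCheck(allowed)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
//		w.WriteHeader(http.StatusOK)
//	}))
//	req := httptest.NewRequest("GET", "http://evil.example:5000/", nil) // Host: evil.example:5000
//	rec := httptest.NewRecorder()
//	h.ServeHTTP(rec, req)
//	// rec.Code == http.StatusForbidden; with Host "localhost:5000" it would be 200.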

// NewServer returns a Server object based on provided Config.
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
	instrumentation, err := util.InstrumentationFromContext(ctx)
@@ -374,7 +393,7 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {

	// cors
	if slices.Contains(cfg.AllowedOrigins, "*") {
-		s.logger.WarnContext(ctx, "wildcard (`*`) allows all origins to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use the `--allowed-origins` flag to prevent DNS rebinding attacks")
+		s.logger.WarnContext(ctx, "wildcard (`*`) allows all origins to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use the `--allowed-origins` flag")
	}
	corsOpts := cors.Options{
		AllowedOrigins: cfg.AllowedOrigins,
@@ -385,6 +404,19 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
		MaxAge: 300, // cache preflight results for 5 minutes
	}
	r.Use(cors.Handler(corsOpts))
	// validate hosts to defend against DNS rebinding attacks
	if slices.Contains(cfg.AllowedHosts, "*") {
		s.logger.WarnContext(ctx, "wildcard (`*`) allows all hosts to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use the `--allowed-hosts` flag to prevent DNS rebinding attacks")
	}
	allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
	for _, h := range cfg.AllowedHosts {
		hostname := h
		if host, _, err := net.SplitHostPort(h); err == nil {
			hostname = host
		}
		allowedHostsMap[hostname] = struct{}{}
	}
	r.Use(hostCheck(allowedHostsMap))

	// control plane
	apiR, err := apiRouter(s)

@@ -43,9 +43,10 @@ func TestServe(t *testing.T) {

	addr, port := "127.0.0.1", 5000
	cfg := server.ServerConfig{
-		Version: "0.0.0",
-		Address: addr,
-		Port:    port,
+		Version:      "0.0.0",
+		Address:      addr,
+		Port:         port,
+		AllowedHosts: []string{"*"},
	}

	otelShutdown, err := telemetry.SetupOTel(ctx, "0.0.0", "", false, "toolbox")

@@ -89,6 +89,7 @@ type Config struct {
	UseClientOAuth            bool                `yaml:"useClientOAuth"`
	ImpersonateServiceAccount string              `yaml:"impersonateServiceAccount"`
	Scopes                    StringOrStringSlice `yaml:"scopes"`
	MaxQueryResultRows        int                 `yaml:"maxQueryResultRows"`
}

// StringOrStringSlice is a custom type that can unmarshal both a single string
@@ -127,6 +128,10 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
		r.WriteMode = WriteModeAllowed
	}

	if r.MaxQueryResultRows == 0 {
		r.MaxQueryResultRows = 50
	}

	if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
		// The protected mode only allows write operations to the session's temporary datasets.
		// when using client OAuth, a new session is created every
@@ -150,7 +155,7 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
		Client:             client,
		RestService:        restService,
		TokenSource:        tokenSource,
-		MaxQueryResultRows: 50,
+		MaxQueryResultRows: r.MaxQueryResultRows,
		ClientCreator:      clientCreator,
	}

@@ -567,7 +572,7 @@ func (s *Source) RunSQL(ctx context.Context, bqClient *bigqueryapi.Client, state
	}

	var out []any
-	for {
+	for s.MaxQueryResultRows <= 0 || len(out) < s.MaxQueryResultRows {
		var val []bigqueryapi.Value
		err = it.Next(&val)
		if err == iterator.Done {

@@ -21,9 +21,12 @@ import (

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"go.opentelemetry.io/otel/trace/noop"

	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources/bigquery"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/util"
)

func TestParseFromYamlBigQuery(t *testing.T) {
@@ -154,6 +157,26 @@ func TestParseFromYamlBigQuery(t *testing.T) {
			},
		},
	},
	{
		desc: "with max query result rows example",
		in: `
			sources:
				my-instance:
					kind: bigquery
					project: my-project
					location: us
					maxQueryResultRows: 10
			`,
		want: server.SourceConfigs{
			"my-instance": bigquery.Config{
				Name:               "my-instance",
				Kind:               bigquery.SourceKind,
				Project:            "my-project",
				Location:           "us",
				MaxQueryResultRows: 10,
			},
		},
	},
}
for _, tc := range tcs {
	t.Run(tc.desc, func(t *testing.T) {
@@ -220,6 +243,59 @@ func TestFailParseFromYaml(t *testing.T) {
	}
}

func TestInitialize_MaxQueryResultRows(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	ctx = util.WithUserAgent(ctx, "test-agent")
	tracer := noop.NewTracerProvider().Tracer("")

	tcs := []struct {
		desc string
		cfg  bigquery.Config
		want int
	}{
		{
			desc: "default value",
			cfg: bigquery.Config{
				Name:           "test-default",
				Kind:           bigquery.SourceKind,
				Project:        "test-project",
				UseClientOAuth: true,
			},
			want: 50,
		},
		{
			desc: "configured value",
			cfg: bigquery.Config{
				Name:               "test-configured",
				Kind:               bigquery.SourceKind,
				Project:            "test-project",
				UseClientOAuth:     true,
				MaxQueryResultRows: 100,
			},
			want: 100,
		},
	}

	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			src, err := tc.cfg.Initialize(ctx, tracer)
			if err != nil {
				t.Fatalf("Initialize failed: %v", err)
			}
			bqSrc, ok := src.(*bigquery.Source)
			if !ok {
				t.Fatalf("Expected *bigquery.Source, got %T", src)
			}
			if bqSrc.MaxQueryResultRows != tc.want {
				t.Errorf("MaxQueryResultRows = %d, want %d", bqSrc.MaxQueryResultRows, tc.want)
			}
		})
	}
}

func TestNormalizeValue(t *testing.T) {
	tests := []struct {
		name string

@@ -16,8 +16,12 @@ package cloudhealthcare

import (
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
@@ -255,3 +259,299 @@ func (s *Source) IsDICOMStoreAllowed(storeID string) bool {
func (s *Source) UseClientAuthorization() bool {
	return s.UseClientOAuth
}

func parseResults(resp *http.Response) (any, error) {
	respBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("could not read response: %w", err)
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
	}
	var jsonMap map[string]interface{}
	if err := json.Unmarshal(respBytes, &jsonMap); err != nil {
		return nil, fmt.Errorf("could not unmarshal response as json: %w", err)
	}
	return jsonMap, nil
}

func (s *Source) getService(tokenStr string) (*healthcare.Service, error) {
	svc := s.Service()
	var err error
	// Initialize new service if using user OAuth token
	if s.UseClientAuthorization() {
		svc, err = s.ServiceCreator()(tokenStr)
		if err != nil {
			return nil, fmt.Errorf("error creating service from OAuth access token: %w", err)
		}
	}
	return svc, nil
}

func (s *Source) FHIRFetchPage(ctx context.Context, url, tokenStr string) (any, error) {
	var httpClient *http.Client
	if s.UseClientAuthorization() {
		ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: tokenStr})
		httpClient = oauth2.NewClient(ctx, ts)
	} else {
		// The source.Service() object holds a client with the default credentials.
		// However, the client is not exported, so we have to create a new one.
		var err error
		httpClient, err = google.DefaultClient(ctx, healthcare.CloudHealthcareScope)
		if err != nil {
			return nil, fmt.Errorf("failed to create default http client: %w", err)
		}
	}

	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create http request: %w", err)
	}
	req.Header.Set("Accept", "application/fhir+json;charset=utf-8")

	resp, err := httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("failed to get fhir page from %q: %w", url, err)
	}
	defer resp.Body.Close()
	return parseResults(resp)
}
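
// Illustrative sketch (not part of this diff) of what FHIRFetchPage enables:
// walking a paginated FHIR search. Each bundle carries a "link" array whose
// entry with relation "next" holds the URL of the following page; the field
// names below follow the FHIR bundle wire format, and the caller code is
// hypothetical.
//
//	page, err := src.FHIRPatientSearch(storeID, token, nil)
//	for err == nil {
//		next := ""
//		if links, ok := page.(map[string]any)["link"].([]any); ok {
//			for _, l := range links {
//				if lm, ok := l.(map[string]any); ok && lm["relation"] == "next" {
//					next, _ = lm["url"].(string)
//				}
//			}
//		}
//		if next == "" {
//			break
//		}
//		page, err = src.FHIRFetchPage(ctx, next, token)
//	}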

func (s *Source) FHIRPatientEverything(storeID, patientID, tokenStr string, opts []googleapi.CallOption) (any, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s/fhir/Patient/%s", s.Project(), s.Region(), s.DatasetID(), storeID, patientID)
	resp, err := svc.Projects.Locations.Datasets.FhirStores.Fhir.PatientEverything(name).Do(opts...)
	if err != nil {
		return nil, fmt.Errorf("failed to call patient everything for %q: %w", name, err)
	}
	defer resp.Body.Close()
	return parseResults(resp)
}

func (s *Source) FHIRPatientSearch(storeID, tokenStr string, opts []googleapi.CallOption) (any, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	resp, err := svc.Projects.Locations.Datasets.FhirStores.Fhir.SearchType(name, "Patient", &healthcare.SearchResourcesRequest{ResourceType: "Patient"}).Do(opts...)
	if err != nil {
		return nil, fmt.Errorf("failed to search patient resources: %w", err)
	}
	defer resp.Body.Close()
	return parseResults(resp)
}

func (s *Source) GetDataset(tokenStr string) (*healthcare.Dataset, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	datasetName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s", s.Project(), s.Region(), s.DatasetID())
	dataset, err := svc.Projects.Locations.Datasets.Get(datasetName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get dataset %q: %w", datasetName, err)
	}
	return dataset, nil
}

func (s *Source) GetFHIRResource(storeID, resType, resID, tokenStr string) (any, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s/fhir/%s/%s", s.Project(), s.Region(), s.DatasetID(), storeID, resType, resID)
	call := svc.Projects.Locations.Datasets.FhirStores.Fhir.Read(name)
	call.Header().Set("Content-Type", "application/fhir+json;charset=utf-8")
	resp, err := call.Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get fhir resource %q: %w", name, err)
	}
	defer resp.Body.Close()
	return parseResults(resp)
}

func (s *Source) GetDICOMStore(storeID, tokenStr string) (*healthcare.DicomStore, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	storeName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/dicomStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	store, err := svc.Projects.Locations.Datasets.DicomStores.Get(storeName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get DICOM store %q: %w", storeName, err)
	}
	return store, nil
}

func (s *Source) GetFHIRStore(storeID, tokenStr string) (*healthcare.FhirStore, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	storeName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	store, err := svc.Projects.Locations.Datasets.FhirStores.Get(storeName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get FHIR store %q: %w", storeName, err)
	}
	return store, nil
}

func (s *Source) GetDICOMStoreMetrics(storeID, tokenStr string) (*healthcare.DicomStoreMetrics, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	storeName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/dicomStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	store, err := svc.Projects.Locations.Datasets.DicomStores.GetDICOMStoreMetrics(storeName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get metrics for DICOM store %q: %w", storeName, err)
	}
	return store, nil
}

func (s *Source) GetFHIRStoreMetrics(storeID, tokenStr string) (*healthcare.FhirStoreMetrics, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	storeName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	store, err := svc.Projects.Locations.Datasets.FhirStores.GetFHIRStoreMetrics(storeName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get metrics for FHIR store %q: %w", storeName, err)
	}
	return store, nil
}

func (s *Source) ListDICOMStores(tokenStr string) ([]*healthcare.DicomStore, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}

	datasetName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s", s.Project(), s.Region(), s.DatasetID())
	stores, err := svc.Projects.Locations.Datasets.DicomStores.List(datasetName).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to list DICOM stores in dataset %q: %w", datasetName, err)
	}
	var filtered []*healthcare.DicomStore
	for _, store := range stores.DicomStores {
		if len(s.AllowedDICOMStores()) == 0 {
			filtered = append(filtered, store)
			continue
		}
		if len(store.Name) == 0 {
			continue
		}
		parts := strings.Split(store.Name, "/")
		if _, ok := s.AllowedDICOMStores()[parts[len(parts)-1]]; ok {
			filtered = append(filtered, store)
		}
	}
	return filtered, nil
}

func (s *Source) ListFHIRStores(tokenStr string) ([]*healthcare.FhirStore, error) {
|
||||
svc, err := s.getService(tokenStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
datasetName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s", s.Project(), s.Region(), s.DatasetID())
|
||||
stores, err := svc.Projects.Locations.Datasets.FhirStores.List(datasetName).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get dataset %q: %w", datasetName, err)
|
||||
}
|
||||
var filtered []*healthcare.FhirStore
|
||||
for _, store := range stores.FhirStores {
|
||||
if len(s.AllowedFHIRStores()) == 0 {
|
||||
filtered = append(filtered, store)
|
||||
continue
|
||||
}
|
||||
if len(store.Name) == 0 {
|
||||
continue
|
||||
}
|
||||
parts := strings.Split(store.Name, "/")
|
||||
if _, ok := s.AllowedFHIRStores()[parts[len(parts)-1]]; ok {
|
||||
filtered = append(filtered, store)
|
||||
}
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
|
||||
func (s *Source) RetrieveRenderedDICOMInstance(storeID, study, series, sop string, frame int, tokenStr string) (any, error) {
|
||||
svc, err := s.getService(tokenStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/dicomStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
|
||||
dicomWebPath := fmt.Sprintf("studies/%s/series/%s/instances/%s/frames/%d/rendered", study, series, sop, frame)
|
||||
call := svc.Projects.Locations.Datasets.DicomStores.Studies.Series.Instances.Frames.RetrieveRendered(name, dicomWebPath)
|
||||
call.Header().Set("Accept", "image/jpeg")
|
||||
resp, err := call.Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to retrieve dicom instance rendered image: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
respBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not read response: %w", err)
|
||||
}
|
||||
if resp.StatusCode > 299 {
|
||||
return nil, fmt.Errorf("RetrieveRendered: status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
|
||||
}
|
||||
base64String := base64.StdEncoding.EncodeToString(respBytes)
|
||||
return base64String, nil
|
||||
}
|
||||
|
||||
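RetrieveRenderedDICOMInstance hands the rendered frame back as a base64 string rather than raw bytes, so a caller that wants the JPEG again has to decode it. A minimal, self-contained sketch of that round trip (the base64 value below is a tiny placeholder, not real DICOM pixel data):

package main

import (
	"encoding/base64"
	"log"
	"os"
)

func main() {
	// b64 stands in for the string returned by RetrieveRenderedDICOMInstance.
	b64 := "/9j/4AAQSkZJRg=="
	jpegBytes, err := base64.StdEncoding.DecodeString(b64)
	if err != nil {
		log.Fatal(err)
	}
	// Write the decoded JPEG frame to disk.
	if err := os.WriteFile("frame.jpg", jpegBytes, 0o644); err != nil {
		log.Fatal(err)
	}
}
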
func (s *Source) SearchDICOM(toolKind, storeID, dicomWebPath, tokenStr string, opts []googleapi.CallOption) (any, error) {
	svc, err := s.getService(tokenStr)
	if err != nil {
		return nil, err
	}
	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/dicomStores/%s", s.Project(), s.Region(), s.DatasetID(), storeID)
	var resp *http.Response
	switch toolKind {
	case "cloud-healthcare-search-dicom-instances":
		resp, err = svc.Projects.Locations.Datasets.DicomStores.SearchForInstances(name, dicomWebPath).Do(opts...)
	case "cloud-healthcare-search-dicom-series":
		resp, err = svc.Projects.Locations.Datasets.DicomStores.SearchForSeries(name, dicomWebPath).Do(opts...)
	case "cloud-healthcare-search-dicom-studies":
		resp, err = svc.Projects.Locations.Datasets.DicomStores.SearchForStudies(name, dicomWebPath).Do(opts...)
	default:
		return nil, fmt.Errorf("incompatible tool kind: %s", toolKind)
	}
	if err != nil {
		return nil, fmt.Errorf("failed to search dicom series: %w", err)
	}
	defer resp.Body.Close()

	respBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("could not read response: %w", err)
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("search: status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
	}
	if len(respBytes) == 0 {
		return []interface{}{}, nil
	}
	var result []interface{}
	if err := json.Unmarshal(respBytes, &result); err != nil {
		return nil, fmt.Errorf("could not unmarshal response as list: %w", err)
	}
	return result, nil
}

@@ -19,6 +19,7 @@ import (
	"fmt"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"text/template"
	"time"
@@ -36,7 +37,10 @@ import (

const SourceKind string = "cloud-sql-admin"

var targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
var (
	targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
	backupDRRegex   = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/backupVaults/([^/]+)/dataSources/([^/]+)/backups/([^/]+)$`)
)

// validate interface
var _ sources.SourceConfig = Config{}
@@ -352,6 +356,70 @@ func (s *Source) GetWaitForOperations(ctx context.Context, service *sqladmin.Ser
	return nil, nil
}

func (s *Source) InsertBackupRun(ctx context.Context, project, instance, location, backupDescription, accessToken string) (any, error) {
	backupRun := &sqladmin.BackupRun{}
	if location != "" {
		backupRun.Location = location
	}
	if backupDescription != "" {
		backupRun.Description = backupDescription
	}

	service, err := s.GetService(ctx, string(accessToken))
	if err != nil {
		return nil, err
	}

	resp, err := service.BackupRuns.Insert(project, instance, backupRun).Do()
	if err != nil {
		return nil, fmt.Errorf("error creating backup: %w", err)
	}

	return resp, nil
}

func (s *Source) RestoreBackup(ctx context.Context, targetProject, targetInstance, sourceProject, sourceInstance, backupID, accessToken string) (any, error) {
	request := &sqladmin.InstancesRestoreBackupRequest{}

	// There are 3 scenarios for the backup identifier:
	// 1. The identifier is an int64 containing the timestamp of the BackupRun.
	//    This is used to restore standard backups, and the RestoreBackupContext
	//    field should be populated with the backup ID and source instance info.
	// 2. The identifier is a string of the format
	//    'projects/{project-id}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup-uid}'.
	//    This is used to restore BackupDR backups, and the BackupdrBackup field
	//    should be populated.
	// 3. The identifier is a string of the format
	//    'projects/{project-id}/backups/{backup-uid}'. In this case, the Backup
	//    field should be populated.
	if backupRunID, err := strconv.ParseInt(backupID, 10, 64); err == nil {
		if sourceProject == "" || targetInstance == "" {
			return nil, fmt.Errorf("source project and instance are required when restoring via backup ID")
		}
		request.RestoreBackupContext = &sqladmin.RestoreBackupContext{
			Project:     sourceProject,
			InstanceId:  sourceInstance,
			BackupRunId: backupRunID,
		}
	} else if backupDRRegex.MatchString(backupID) {
		request.BackupdrBackup = backupID
	} else {
		request.Backup = backupID
	}

	service, err := s.GetService(ctx, string(accessToken))
	if err != nil {
		return nil, err
	}

	resp, err := service.Instances.RestoreBackup(targetProject, targetInstance, request).Do()
	if err != nil {
		return nil, fmt.Errorf("error restoring backup: %w", err)
	}

	return resp, nil
}

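The dispatch above keys entirely off the shape of backupID. A standalone illustration of which request field each identifier shape would populate (the identifiers below are made up):

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Same pattern as backupDRRegex above.
var backupDR = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/backupVaults/([^/]+)/dataSources/([^/]+)/backups/([^/]+)$`)

// classify mirrors the branch order in RestoreBackup.
func classify(backupID string) string {
	if _, err := strconv.ParseInt(backupID, 10, 64); err == nil {
		return "RestoreBackupContext (standard backup run)"
	}
	if backupDR.MatchString(backupID) {
		return "BackupdrBackup (BackupDR backup)"
	}
	return "Backup (project-scoped backup name)"
}

func main() {
	for _, id := range []string{
		"1700000000000",
		"projects/p/locations/us-central1/backupVaults/v/dataSources/d/backups/b",
		"projects/p/backups/b",
	} {
		fmt.Printf("%s -> %s\n", id, classify(id))
	}
}
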
func generateCloudSQLConnectionMessage(ctx context.Context, source *Source, logger log.Logger, opResponse map[string]any, connectionMessageTemplate string) (string, bool) {
	operationType, ok := opResponse["operationType"].(string)
	if !ok || operationType != "CREATE_DATABASE" {

@@ -19,6 +19,8 @@ import (
	"fmt"

	dataplexapi "cloud.google.com/go/dataplex/apiv1"
	"cloud.google.com/go/dataplex/apiv1/dataplexpb"
	"github.com/cenkalti/backoff/v5"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
@@ -121,3 +123,101 @@ func initDataplexConnection(
	}
	return client, nil
}

func (s *Source) LookupEntry(ctx context.Context, name string, view int, aspectTypes []string, entry string) (*dataplexpb.Entry, error) {
	viewMap := map[int]dataplexpb.EntryView{
		1: dataplexpb.EntryView_BASIC,
		2: dataplexpb.EntryView_FULL,
		3: dataplexpb.EntryView_CUSTOM,
		4: dataplexpb.EntryView_ALL,
	}
	req := &dataplexpb.LookupEntryRequest{
		Name:        name,
		View:        viewMap[view],
		AspectTypes: aspectTypes,
		Entry:       entry,
	}
	result, err := s.CatalogClient().LookupEntry(ctx, req)
	if err != nil {
		return nil, err
	}
	return result, nil
}

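The integer view argument is a thin alias over the EntryView enum (1=BASIC, 2=FULL, 3=CUSTOM, 4=ALL); any other value falls through to the zero value of the enum. A hypothetical call requesting a full entry, assuming a Source value src and a context ctx are in scope (the resource names are placeholders, not real resources):

// Hypothetical usage of LookupEntry.
entry, err := src.LookupEntry(ctx,
	"projects/my-project/locations/us-central1/entryGroups/my-group", // lookup scope
	2,   // 2 maps to dataplexpb.EntryView_FULL in viewMap above
	nil, // no aspect-type filter
	"projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry",
)
if err != nil {
	return err
}
fmt.Println(entry.GetName())
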
func (s *Source) searchRequest(ctx context.Context, query string, pageSize int, orderBy string) (*dataplexapi.SearchEntriesResultIterator, error) {
	// Create SearchEntriesRequest with the provided parameters
	req := &dataplexpb.SearchEntriesRequest{
		Query:          query,
		Name:           fmt.Sprintf("projects/%s/locations/global", s.ProjectID()),
		PageSize:       int32(pageSize),
		OrderBy:        orderBy,
		SemanticSearch: true,
	}

	// Perform the search using the CatalogClient - this will return an iterator
	it := s.CatalogClient().SearchEntries(ctx, req)
	if it == nil {
		return nil, fmt.Errorf("failed to create search entries iterator for project %q", s.ProjectID())
	}
	return it, nil
}

func (s *Source) SearchAspectTypes(ctx context.Context, query string, pageSize int, orderBy string) ([]*dataplexpb.AspectType, error) {
	q := query + " type=projects/dataplex-types/locations/global/entryTypes/aspecttype"
	it, err := s.searchRequest(ctx, q, pageSize, orderBy)
	if err != nil {
		return nil, err
	}

	// Iterate through the search results and call GetAspectType for each result using the resource name
	var results []*dataplexpb.AspectType
	for {
		entry, err := it.Next()
		if err != nil {
			break
		}

		// Create an instance of exponential backoff with default values for retrying GetAspectType calls
		// InitialInterval, RandomizationFactor, Multiplier, MaxInterval = 500 ms, 0.5, 1.5, 60 s
		getAspectBackOff := backoff.NewExponentialBackOff()

		resourceName := entry.DataplexEntry.GetEntrySource().Resource
		getAspectTypeReq := &dataplexpb.GetAspectTypeRequest{
			Name: resourceName,
		}

		operation := func() (*dataplexpb.AspectType, error) {
			aspectType, err := s.CatalogClient().GetAspectType(ctx, getAspectTypeReq)
			if err != nil {
				return nil, fmt.Errorf("failed to get aspect type for entry %q: %w", resourceName, err)
			}
			return aspectType, nil
		}

		// Retry the GetAspectType operation with exponential backoff
		aspectType, err := backoff.Retry(ctx, operation, backoff.WithBackOff(getAspectBackOff))
		if err != nil {
			return nil, fmt.Errorf("failed to get aspect type after retries for entry %q: %w", resourceName, err)
		}

		results = append(results, aspectType)
	}
	return results, nil
}

func (s *Source) SearchEntries(ctx context.Context, query string, pageSize int, orderBy string) ([]*dataplexpb.SearchEntriesResult, error) {
	it, err := s.searchRequest(ctx, query, pageSize, orderBy)
	if err != nil {
		return nil, err
	}

	var results []*dataplexpb.SearchEntriesResult
	for {
		entry, err := it.Next()
		if err != nil {
			break
		}
		results = append(results, entry)
	}
	return results, nil
}

@@ -16,7 +16,10 @@ package firestore

import (
	"context"
	"encoding/base64"
	"fmt"
	"strings"
	"time"

	"cloud.google.com/go/firestore"
	"github.com/goccy/go-yaml"
@@ -25,6 +28,7 @@ import (
	"go.opentelemetry.io/otel/trace"
	"google.golang.org/api/firebaserules/v1"
	"google.golang.org/api/option"
	"google.golang.org/genproto/googleapis/type/latlng"
)

const SourceKind string = "firestore"
@@ -113,6 +117,476 @@ func (s *Source) GetDatabaseId() string {
	return s.Database
}

// FirestoreValueToJSON converts a Firestore value to a simplified JSON representation
// This removes type information and returns plain values
func FirestoreValueToJSON(value any) any {
	if value == nil {
		return nil
	}

	switch v := value.(type) {
	case time.Time:
		return v.Format(time.RFC3339Nano)
	case *latlng.LatLng:
		return map[string]any{
			"latitude":  v.Latitude,
			"longitude": v.Longitude,
		}
	case []byte:
		return base64.StdEncoding.EncodeToString(v)
	case []any:
		result := make([]any, len(v))
		for i, item := range v {
			result[i] = FirestoreValueToJSON(item)
		}
		return result
	case map[string]any:
		result := make(map[string]any)
		for k, val := range v {
			result[k] = FirestoreValueToJSON(val)
		}
		return result
	case *firestore.DocumentRef:
		return v.Path
	default:
		return value
	}
}

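A quick illustration of the flattening this performs, with made-up input values: timestamps become RFC 3339 strings, byte slices become base64, slices and maps are recursed, and anything else passes through unchanged. This snippet assumes it runs inside the firestore package, where FirestoreValueToJSON is in scope:

// Hypothetical input document data; the comments show what comes back.
in := map[string]any{
	"when":  time.Date(2026, 1, 8, 12, 0, 0, 0, time.UTC), // -> "2026-01-08T12:00:00Z"
	"blob":  []byte{0xDE, 0xAD},                            // -> "3q0="
	"tags":  []any{"a", "b"},                               // recursed element-wise
	"count": int64(3),                                      // passed through unchanged
}
out := FirestoreValueToJSON(in)
fmt.Printf("%#v\n", out)
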
// BuildQuery constructs the Firestore query from parameters
func (s *Source) BuildQuery(collectionPath string, filter firestore.EntityFilter, selectFields []string, field string, direction firestore.Direction, limit int, analyzeQuery bool) (*firestore.Query, error) {
	collection := s.FirestoreClient().Collection(collectionPath)
	query := collection.Query

	// Process and apply filters if template is provided
	if filter != nil {
		query = query.WhereEntity(filter)
	}
	if len(selectFields) > 0 {
		query = query.Select(selectFields...)
	}
	if field != "" {
		query = query.OrderBy(field, direction)
	}
	query = query.Limit(limit)

	// Apply analyze options if enabled
	if analyzeQuery {
		query = query.WithRunOptions(firestore.ExplainOptions{
			Analyze: true,
		})
	}

	return &query, nil
}

// QueryResult represents a document result from the query
type QueryResult struct {
	ID         string         `json:"id"`
	Path       string         `json:"path"`
	Data       map[string]any `json:"data"`
	CreateTime any            `json:"createTime,omitempty"`
	UpdateTime any            `json:"updateTime,omitempty"`
	ReadTime   any            `json:"readTime,omitempty"`
}

// QueryResponse represents the full response including optional metrics
type QueryResponse struct {
	Documents      []QueryResult  `json:"documents"`
	ExplainMetrics map[string]any `json:"explainMetrics,omitempty"`
}

// ExecuteQuery runs the query and formats the results
func (s *Source) ExecuteQuery(ctx context.Context, query *firestore.Query, analyzeQuery bool) (any, error) {
	docIterator := query.Documents(ctx)
	docs, err := docIterator.GetAll()
	if err != nil {
		return nil, fmt.Errorf("failed to execute query: %w", err)
	}
	// Convert results to structured format
	results := make([]QueryResult, len(docs))
	for i, doc := range docs {
		results[i] = QueryResult{
			ID:         doc.Ref.ID,
			Path:       doc.Ref.Path,
			Data:       doc.Data(),
			CreateTime: doc.CreateTime,
			UpdateTime: doc.UpdateTime,
			ReadTime:   doc.ReadTime,
		}
	}

	// Return with explain metrics if requested
	if analyzeQuery {
		explainMetrics, err := getExplainMetrics(docIterator)
		if err == nil && explainMetrics != nil {
			response := QueryResponse{
				Documents:      results,
				ExplainMetrics: explainMetrics,
			}
			return response, nil
		}
	}
	return results, nil
}

// getExplainMetrics extracts explain metrics from the query iterator
func getExplainMetrics(docIterator *firestore.DocumentIterator) (map[string]any, error) {
	explainMetrics, err := docIterator.ExplainMetrics()
	if err != nil || explainMetrics == nil {
		return nil, err
	}

	metricsData := make(map[string]any)

	// Add plan summary if available
	if explainMetrics.PlanSummary != nil {
		planSummary := make(map[string]any)
		planSummary["indexesUsed"] = explainMetrics.PlanSummary.IndexesUsed
		metricsData["planSummary"] = planSummary
	}

	// Add execution stats if available
	if explainMetrics.ExecutionStats != nil {
		executionStats := make(map[string]any)
		executionStats["resultsReturned"] = explainMetrics.ExecutionStats.ResultsReturned
		executionStats["readOperations"] = explainMetrics.ExecutionStats.ReadOperations

		if explainMetrics.ExecutionStats.ExecutionDuration != nil {
			executionStats["executionDuration"] = explainMetrics.ExecutionStats.ExecutionDuration.String()
		}

		if explainMetrics.ExecutionStats.DebugStats != nil {
			executionStats["debugStats"] = *explainMetrics.ExecutionStats.DebugStats
		}

		metricsData["executionStats"] = executionStats
	}

	return metricsData, nil
}

func (s *Source) GetDocuments(ctx context.Context, documentPaths []string) ([]any, error) {
	// Create document references from paths
	docRefs := make([]*firestore.DocumentRef, len(documentPaths))
	for i, path := range documentPaths {
		docRefs[i] = s.FirestoreClient().Doc(path)
	}

	// Get all documents
	snapshots, err := s.FirestoreClient().GetAll(ctx, docRefs)
	if err != nil {
		return nil, fmt.Errorf("failed to get documents: %w", err)
	}

	// Convert snapshots to response data
	results := make([]any, len(snapshots))
	for i, snapshot := range snapshots {
		docData := make(map[string]any)
		docData["path"] = documentPaths[i]
		docData["exists"] = snapshot.Exists()

		if snapshot.Exists() {
			docData["data"] = snapshot.Data()
			docData["createTime"] = snapshot.CreateTime
			docData["updateTime"] = snapshot.UpdateTime
			docData["readTime"] = snapshot.ReadTime
		}

		results[i] = docData
	}

	return results, nil
}

func (s *Source) AddDocuments(ctx context.Context, collectionPath string, documentData any, returnData bool) (map[string]any, error) {
	// Get the collection reference
	collection := s.FirestoreClient().Collection(collectionPath)

	// Add the document to the collection
	docRef, writeResult, err := collection.Add(ctx, documentData)
	if err != nil {
		return nil, fmt.Errorf("failed to add document: %w", err)
	}
	// Build the response
	response := map[string]any{
		"documentPath": docRef.Path,
		"createTime":   writeResult.UpdateTime.Format("2006-01-02T15:04:05.999999999Z"),
	}
	// Add document data if requested
	if returnData {
		// Fetch the updated document to return the current state
		snapshot, err := docRef.Get(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to retrieve updated document: %w", err)
		}
		// Convert the document data back to simple JSON format
		simplifiedData := FirestoreValueToJSON(snapshot.Data())
		response["documentData"] = simplifiedData
	}
	return response, nil
}

func (s *Source) UpdateDocument(ctx context.Context, documentPath string, updates []firestore.Update, documentData any, returnData bool) (map[string]any, error) {
	// Get the document reference
	docRef := s.FirestoreClient().Doc(documentPath)

	// Prepare update data
	var writeResult *firestore.WriteResult
	var writeErr error

	if len(updates) > 0 {
		writeResult, writeErr = docRef.Update(ctx, updates)
	} else {
		writeResult, writeErr = docRef.Set(ctx, documentData, firestore.MergeAll)
	}

	if writeErr != nil {
		return nil, fmt.Errorf("failed to update document: %w", writeErr)
	}

	// Build the response
	response := map[string]any{
		"documentPath": docRef.Path,
		"updateTime":   writeResult.UpdateTime.Format("2006-01-02T15:04:05.999999999Z"),
	}

	// Add document data if requested
	if returnData {
		// Fetch the updated document to return the current state
		snapshot, err := docRef.Get(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to retrieve updated document: %w", err)
		}
		// Convert the document data to simple JSON format
		simplifiedData := FirestoreValueToJSON(snapshot.Data())
		response["documentData"] = simplifiedData
	}

	return response, nil
}

func (s *Source) DeleteDocuments(ctx context.Context, documentPaths []string) ([]any, error) {
	// Create a BulkWriter to handle multiple deletions efficiently
	bulkWriter := s.FirestoreClient().BulkWriter(ctx)

	// Keep track of jobs for each document
	jobs := make([]*firestore.BulkWriterJob, len(documentPaths))

	// Add all delete operations to the BulkWriter
	for i, path := range documentPaths {
		docRef := s.FirestoreClient().Doc(path)
		job, err := bulkWriter.Delete(docRef)
		if err != nil {
			return nil, fmt.Errorf("failed to add delete operation for document %q: %w", path, err)
		}
		jobs[i] = job
	}

	// End the BulkWriter to execute all operations
	bulkWriter.End()

	// Collect results
	results := make([]any, len(documentPaths))
	for i, job := range jobs {
		docData := make(map[string]any)
		docData["path"] = documentPaths[i]

		// Wait for the job to complete and get the result
		_, err := job.Results()
		if err != nil {
			docData["success"] = false
			docData["error"] = err.Error()
		} else {
			docData["success"] = true
		}

		results[i] = docData
	}
	return results, nil
}

func (s *Source) ListCollections(ctx context.Context, parentPath string) ([]any, error) {
	var collectionRefs []*firestore.CollectionRef
	var err error
	if parentPath != "" {
		// List subcollections of the specified document
		docRef := s.FirestoreClient().Doc(parentPath)
		collectionRefs, err = docRef.Collections(ctx).GetAll()
		if err != nil {
			return nil, fmt.Errorf("failed to list subcollections of document %q: %w", parentPath, err)
		}
	} else {
		// List root collections
		collectionRefs, err = s.FirestoreClient().Collections(ctx).GetAll()
		if err != nil {
			return nil, fmt.Errorf("failed to list root collections: %w", err)
		}
	}

	// Convert collection references to response data
	results := make([]any, len(collectionRefs))
	for i, collRef := range collectionRefs {
		collData := make(map[string]any)
		collData["id"] = collRef.ID
		collData["path"] = collRef.Path

		// If this is a subcollection, include parent information
		if collRef.Parent != nil {
			collData["parent"] = collRef.Parent.Path
		}
		results[i] = collData
	}
	return results, nil
}

func (s *Source) GetRules(ctx context.Context) (any, error) {
	// Get the latest release for Firestore
	releaseName := fmt.Sprintf("projects/%s/releases/cloud.firestore/%s", s.GetProjectId(), s.GetDatabaseId())
	release, err := s.FirebaseRulesClient().Projects.Releases.Get(releaseName).Context(ctx).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get latest Firestore release: %w", err)
	}

	if release.RulesetName == "" {
		return nil, fmt.Errorf("no active Firestore rules were found in project '%s' and database '%s'", s.GetProjectId(), s.GetDatabaseId())
	}

	// Get the ruleset content
	ruleset, err := s.FirebaseRulesClient().Projects.Rulesets.Get(release.RulesetName).Context(ctx).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to get ruleset content: %w", err)
	}

	if ruleset.Source == nil || len(ruleset.Source.Files) == 0 {
		return nil, fmt.Errorf("no rules files found in ruleset")
	}

	return ruleset, nil
}

// SourcePosition represents the location of an issue in the source
type SourcePosition struct {
	FileName      string `json:"fileName,omitempty"`
	Line          int64  `json:"line"`          // 1-based
	Column        int64  `json:"column"`        // 1-based
	CurrentOffset int64  `json:"currentOffset"` // 0-based, inclusive start
	EndOffset     int64  `json:"endOffset"`     // 0-based, exclusive end
}

// Issue represents a validation issue in the rules
type Issue struct {
	SourcePosition SourcePosition `json:"sourcePosition"`
	Description    string         `json:"description"`
	Severity       string         `json:"severity"`
}

// ValidationResult represents the result of rules validation
type ValidationResult struct {
	Valid           bool    `json:"valid"`
	IssueCount      int     `json:"issueCount"`
	FormattedIssues string  `json:"formattedIssues,omitempty"`
	RawIssues       []Issue `json:"rawIssues,omitempty"`
}

func (s *Source) ValidateRules(ctx context.Context, sourceParam string) (any, error) {
	// Create test request
	testRequest := &firebaserules.TestRulesetRequest{
		Source: &firebaserules.Source{
			Files: []*firebaserules.File{
				{
					Name:    "firestore.rules",
					Content: sourceParam,
				},
			},
		},
		// We don't need test cases for validation only
		TestSuite: &firebaserules.TestSuite{
			TestCases: []*firebaserules.TestCase{},
		},
	}
	// Call the test API
	projectName := fmt.Sprintf("projects/%s", s.GetProjectId())
	response, err := s.FirebaseRulesClient().Projects.Test(projectName, testRequest).Context(ctx).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to validate rules: %w", err)
	}

	// Process the response
	if len(response.Issues) == 0 {
		return ValidationResult{
			Valid:           true,
			IssueCount:      0,
			FormattedIssues: "✓ No errors detected. Rules are valid.",
		}, nil
	}

	// Convert issues to our format
	issues := make([]Issue, len(response.Issues))
	for i, issue := range response.Issues {
		issues[i] = Issue{
			Description: issue.Description,
			Severity:    issue.Severity,
			SourcePosition: SourcePosition{
				FileName:      issue.SourcePosition.FileName,
				Line:          issue.SourcePosition.Line,
				Column:        issue.SourcePosition.Column,
				CurrentOffset: issue.SourcePosition.CurrentOffset,
				EndOffset:     issue.SourcePosition.EndOffset,
			},
		}
	}

	// Format issues
	sourceLines := strings.Split(sourceParam, "\n")
	var formattedOutput []string

	formattedOutput = append(formattedOutput, fmt.Sprintf("Found %d issue(s) in rules source:\n", len(issues)))

	for _, issue := range issues {
		issueString := fmt.Sprintf("%s: %s [Ln %d, Col %d]",
			issue.Severity,
			issue.Description,
			issue.SourcePosition.Line,
			issue.SourcePosition.Column)

		if issue.SourcePosition.Line > 0 {
			lineIndex := int(issue.SourcePosition.Line - 1) // 0-based index
			if lineIndex >= 0 && lineIndex < len(sourceLines) {
				errorLine := sourceLines[lineIndex]
				issueString += fmt.Sprintf("\n```\n%s", errorLine)

				// Add carets if we have column and offset information
				if issue.SourcePosition.Column > 0 &&
					issue.SourcePosition.CurrentOffset >= 0 &&
					issue.SourcePosition.EndOffset > issue.SourcePosition.CurrentOffset {

					startColumn := int(issue.SourcePosition.Column - 1) // 0-based
					errorTokenLength := int(issue.SourcePosition.EndOffset - issue.SourcePosition.CurrentOffset)

					if startColumn >= 0 && errorTokenLength > 0 && startColumn <= len(errorLine) {
						padding := strings.Repeat(" ", startColumn)
						carets := strings.Repeat("^", errorTokenLength)
						issueString += fmt.Sprintf("\n%s%s", padding, carets)
					}
				}
				issueString += "\n```"
			}
		}

		formattedOutput = append(formattedOutput, issueString)
	}

	formattedIssues := strings.Join(formattedOutput, "\n\n")

	return ValidationResult{
		Valid:           false,
		IssueCount:      len(issues),
		FormattedIssues: formattedIssues,
		RawIssues:       issues,
	}, nil
}

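For orientation, the caret rendering in ValidateRules pads to the 1-based column and repeats one caret per byte of the offending token. A standalone demo of just that arithmetic, with a made-up rules line and source position:

package main

import (
	"fmt"
	"strings"
)

func main() {
	errorLine := "allow read: if requst.auth != null;" // hypothetical rules line with a typo'd token
	column := 16                                       // 1-based column where the bad token starts
	tokenLen := 6                                      // endOffset - currentOffset for "requst"

	padding := strings.Repeat(" ", column-1)
	carets := strings.Repeat("^", tokenLen)
	fmt.Printf("%s\n%s%s\n", errorLine, padding, carets)
	// Output:
	// allow read: if requst.auth != null;
	//                ^^^^^^
}
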
func initFirestoreConnection(
	ctx context.Context,
	tracer trace.Tracer,

@@ -16,6 +16,7 @@ package firestore_test

import (
	"testing"
	"time"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
@@ -128,3 +129,37 @@ func TestFailParseFromYamlFirestore(t *testing.T) {
		})
	}
}

func TestFirestoreValueToJSON_RoundTrip(t *testing.T) {
	// Test round-trip conversion
	original := map[string]any{
		"name":   "Test",
		"count":  int64(42),
		"price":  19.99,
		"active": true,
		"tags":   []any{"tag1", "tag2"},
		"metadata": map[string]any{
			"created": time.Now(),
		},
		"nullField": nil,
	}

	// Convert to JSON representation
	jsonRepresentation := firestore.FirestoreValueToJSON(original)

	// Verify types are simplified
	jsonMap, ok := jsonRepresentation.(map[string]any)
	if !ok {
		t.Fatalf("Expected map, got %T", jsonRepresentation)
	}

	// Time should be converted to string
	metadata, ok := jsonMap["metadata"].(map[string]any)
	if !ok {
		t.Fatalf("metadata should be a map, got %T", jsonMap["metadata"])
	}
	_, ok = metadata["created"].(string)
	if !ok {
		t.Errorf("created should be a string, got %T", metadata["created"])
	}
}

@@ -16,7 +16,9 @@ package http
import (
	"context"
	"crypto/tls"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
@@ -143,3 +145,28 @@ func (s *Source) HttpQueryParams() map[string]string {
func (s *Source) Client() *http.Client {
	return s.client
}

func (s *Source) RunRequest(req *http.Request) (any, error) {
	// Make request and fetch response
	resp, err := s.Client().Do(req)
	if err != nil {
		return nil, fmt.Errorf("error making HTTP request: %s", err)
	}
	defer resp.Body.Close()

	var body []byte
	body, err = io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode < 200 || resp.StatusCode > 299 {
		return nil, fmt.Errorf("unexpected status code: %d, response body: %s", resp.StatusCode, string(body))
	}

	var data any
	if err = json.Unmarshal(body, &data); err != nil {
		// if unable to unmarshal data, return result as string.
		return string(body), nil
	}
	return data, nil
}

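RunRequest decodes the body as JSON when it can and otherwise hands back the raw string. A standalone sketch of just that fallback, with made-up payloads:

package main

import (
	"encoding/json"
	"fmt"
)

// decode mirrors RunRequest's fallback: JSON when possible, raw string otherwise.
func decode(body []byte) any {
	var data any
	if err := json.Unmarshal(body, &data); err != nil {
		return string(body)
	}
	return data
}

func main() {
	fmt.Printf("%#v\n", decode([]byte(`{"ok":true}`))) // map[string]interface {}{"ok":true}
	fmt.Printf("%#v\n", decode([]byte("plain text")))  // "plain text"
}
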
@@ -15,7 +15,9 @@ package looker

import (
	"context"
	"crypto/tls"
	"fmt"
	"net/http"
	"strings"
	"time"

@@ -208,6 +210,49 @@ func (s *Source) LookerSessionLength() int64 {
	return s.SessionLength
}

// Make types for RoundTripper
type transportWithAuthHeader struct {
	Base      http.RoundTripper
	AuthToken string
}

func (t *transportWithAuthHeader) RoundTrip(req *http.Request) (*http.Response, error) {
	req.Header.Set("x-looker-appid", "go-sdk")
	req.Header.Set("Authorization", t.AuthToken)
	return t.Base.RoundTrip(req)
}

func (s *Source) GetLookerSDK(accessToken string) (*v4.LookerSDK, error) {
	if s.UseClientAuthorization() {
		if accessToken == "" {
			return nil, fmt.Errorf("no access token supplied with request")
		}

		session := rtl.NewAuthSession(*s.LookerApiSettings())
		// Configure base transport with TLS
		transport := &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: !s.LookerApiSettings().VerifySsl,
			},
		}

		// Build transport for end user token
		session.Client = http.Client{
			Transport: &transportWithAuthHeader{
				Base:      transport,
				AuthToken: accessToken,
			},
		}
		// return SDK with new Transport
		return v4.NewLookerSDK(session), nil
	}

	if s.LookerClient() == nil {
		return nil, fmt.Errorf("client id or client secret not valid")
	}
	return s.LookerClient(), nil
}

func initGoogleCloudConnection(ctx context.Context) (oauth2.TokenSource, error) {
	cred, err := google.FindDefaultCredentials(ctx, geminidataanalytics.DefaultAuthScopes()...)
	if err != nil {

@@ -16,11 +16,14 @@ package mongodb

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
	"go.opentelemetry.io/otel/trace"
@@ -93,6 +96,201 @@ func (s *Source) MongoClient() *mongo.Client {
	return s.Client
}

func parseData(ctx context.Context, cur *mongo.Cursor) ([]any, error) {
	var data = []any{}
	err := cur.All(ctx, &data)
	if err != nil {
		return nil, err
	}
	var final []any
	for _, item := range data {
		tmp, _ := bson.MarshalExtJSON(item, false, false)
		var tmp2 any
		err = json.Unmarshal(tmp, &tmp2)
		if err != nil {
			return nil, err
		}
		final = append(final, tmp2)
	}
	return final, err
}

func (s *Source) Aggregate(ctx context.Context, pipelineString string, canonical, readOnly bool, database, collection string) ([]any, error) {
	var pipeline = []bson.M{}
	err := bson.UnmarshalExtJSON([]byte(pipelineString), canonical, &pipeline)
	if err != nil {
		return nil, err
	}

	if readOnly {
		// Fail if the pipeline contains a $merge or $out stage.
		for _, stage := range pipeline {
			for key := range stage {
				if key == "$merge" || key == "$out" {
					return nil, fmt.Errorf("this is not a read-only pipeline: %+v", stage)
				}
			}
		}
	}

	cur, err := s.MongoClient().Database(database).Collection(collection).Aggregate(ctx, pipeline)
	if err != nil {
		return nil, err
	}
	defer cur.Close(ctx)
	res, err := parseData(ctx, cur)
	if err != nil {
		return nil, err
	}
	if res == nil {
		return []any{}, nil
	}
	return res, err
}

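The read-only guard only inspects top-level stage names, so a pipeline ending in $out is rejected before it ever reaches the server. A hypothetical pipeline string that would trip it, assuming a Source value src and a context ctx are in scope (database and collection names are invented):

// This pipeline would be rejected by Aggregate when readOnly is true,
// because its final stage writes to another collection.
pipelineString := `[
  {"$match": {"status": "active"}},
  {"$out": "archived"}
]`
_, err := src.Aggregate(ctx, pipelineString, false, true, "mydb", "users")
// err: this is not a read-only pipeline: map[$out:archived]
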
func (s *Source) Find(ctx context.Context, filterString, database, collection string, opts *options.FindOptions) ([]any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), false, &filter)
	if err != nil {
		return nil, err
	}

	cur, err := s.MongoClient().Database(database).Collection(collection).Find(ctx, filter, opts)
	if err != nil {
		return nil, err
	}
	defer cur.Close(ctx)
	return parseData(ctx, cur)
}

func (s *Source) FindOne(ctx context.Context, filterString, database, collection string, opts *options.FindOneOptions) ([]any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), false, &filter)
	if err != nil {
		return nil, err
	}

	res := s.MongoClient().Database(database).Collection(collection).FindOne(ctx, filter, opts)
	if res.Err() != nil {
		return nil, res.Err()
	}

	var data any
	err = res.Decode(&data)
	if err != nil {
		return nil, err
	}

	var final []any
	tmp, _ := bson.MarshalExtJSON(data, false, false)
	var tmp2 any
	err = json.Unmarshal(tmp, &tmp2)
	if err != nil {
		return nil, err
	}
	final = append(final, tmp2)

	return final, err
}

func (s *Source) InsertMany(ctx context.Context, jsonData string, canonical bool, database, collection string) ([]any, error) {
	var data = []any{}
	err := bson.UnmarshalExtJSON([]byte(jsonData), canonical, &data)
	if err != nil {
		return nil, err
	}

	res, err := s.MongoClient().Database(database).Collection(collection).InsertMany(ctx, data, options.InsertMany())
	if err != nil {
		return nil, err
	}
	return res.InsertedIDs, nil
}

func (s *Source) InsertOne(ctx context.Context, jsonData string, canonical bool, database, collection string) (any, error) {
	var data any
	err := bson.UnmarshalExtJSON([]byte(jsonData), canonical, &data)
	if err != nil {
		return nil, err
	}

	res, err := s.MongoClient().Database(database).Collection(collection).InsertOne(ctx, data, options.InsertOne())
	if err != nil {
		return nil, err
	}
	return res.InsertedID, nil
}

func (s *Source) UpdateMany(ctx context.Context, filterString string, canonical bool, updateString, database, collection string, upsert bool) ([]any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), canonical, &filter)
	if err != nil {
		return nil, fmt.Errorf("unable to unmarshal filter string: %w", err)
	}
	var update = bson.D{}
	err = bson.UnmarshalExtJSON([]byte(updateString), false, &update)
	if err != nil {
		return nil, fmt.Errorf("unable to unmarshal update string: %w", err)
	}

	res, err := s.MongoClient().Database(database).Collection(collection).UpdateMany(ctx, filter, update, options.Update().SetUpsert(upsert))
	if err != nil {
		return nil, fmt.Errorf("error updating collection: %w", err)
	}
	return []any{res.ModifiedCount, res.UpsertedCount, res.MatchedCount}, nil
}

func (s *Source) UpdateOne(ctx context.Context, filterString string, canonical bool, updateString, database, collection string, upsert bool) (any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), false, &filter)
	if err != nil {
		return nil, fmt.Errorf("unable to unmarshal filter string: %w", err)
	}
	var update = bson.D{}
	err = bson.UnmarshalExtJSON([]byte(updateString), canonical, &update)
	if err != nil {
		return nil, fmt.Errorf("unable to unmarshal update string: %w", err)
	}

	res, err := s.MongoClient().Database(database).Collection(collection).UpdateOne(ctx, filter, update, options.Update().SetUpsert(upsert))
	if err != nil {
		return nil, fmt.Errorf("error updating collection: %w", err)
	}
	return res.ModifiedCount, nil
}

func (s *Source) DeleteMany(ctx context.Context, filterString, database, collection string) (any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), false, &filter)
	if err != nil {
		return nil, err
	}

	res, err := s.MongoClient().Database(database).Collection(collection).DeleteMany(ctx, filter, options.Delete())
	if err != nil {
		return nil, err
	}

	if res.DeletedCount == 0 {
		return nil, errors.New("no document found")
	}
	return res.DeletedCount, nil
}

func (s *Source) DeleteOne(ctx context.Context, filterString, database, collection string) (any, error) {
	var filter = bson.D{}
	err := bson.UnmarshalExtJSON([]byte(filterString), false, &filter)
	if err != nil {
		return nil, err
	}

	res, err := s.MongoClient().Database(database).Collection(collection).DeleteOne(ctx, filter, options.Delete())
	if err != nil {
		return nil, err
	}
	return res.DeletedCount, nil
}

func initMongoDBClient(ctx context.Context, tracer trace.Tracer, name, uri string) (*mongo.Client, error) {
	// Start a tracing span
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)

@@ -16,15 +16,21 @@ package serverlessspark

import (
	"context"
	"encoding/json"
	"fmt"
	"time"

	dataproc "cloud.google.com/go/dataproc/v2/apiv1"
	"cloud.google.com/go/dataproc/v2/apiv1/dataprocpb"
	longrunning "cloud.google.com/go/longrunning/autogen"
	"cloud.google.com/go/longrunning/autogen/longrunningpb"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/trace"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
	"google.golang.org/protobuf/encoding/protojson"
)

const SourceKind string = "serverless-spark"
@@ -121,3 +127,168 @@ func (s *Source) Close() error {
	}
	return nil
}

func (s *Source) CancelOperation(ctx context.Context, operation string) (any, error) {
	req := &longrunningpb.CancelOperationRequest{
		Name: fmt.Sprintf("projects/%s/locations/%s/operations/%s", s.GetProject(), s.GetLocation(), operation),
	}
	client, err := s.GetOperationsClient(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get operations client: %w", err)
	}
	err = client.CancelOperation(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to cancel operation: %w", err)
	}
	return fmt.Sprintf("Cancelled [%s].", operation), nil
}

func (s *Source) CreateBatch(ctx context.Context, batch *dataprocpb.Batch) (map[string]any, error) {
	req := &dataprocpb.CreateBatchRequest{
		Parent: fmt.Sprintf("projects/%s/locations/%s", s.GetProject(), s.GetLocation()),
		Batch:  batch,
	}

	client := s.GetBatchControllerClient()
	op, err := client.CreateBatch(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to create batch: %w", err)
	}
	meta, err := op.Metadata()
	if err != nil {
		return nil, fmt.Errorf("failed to get create batch op metadata: %w", err)
	}

	projectID, location, batchID, err := ExtractBatchDetails(meta.GetBatch())
	if err != nil {
		return nil, fmt.Errorf("error extracting batch details from name %q: %v", meta.GetBatch(), err)
	}
	consoleUrl := BatchConsoleURL(projectID, location, batchID)
	logsUrl := BatchLogsURL(projectID, location, batchID, meta.GetCreateTime().AsTime(), time.Time{})

	wrappedResult := map[string]any{
		"opMetadata": meta,
		"consoleUrl": consoleUrl,
		"logsUrl":    logsUrl,
	}
	return wrappedResult, nil
}

// ListBatchesResponse is the response from the list batches API.
type ListBatchesResponse struct {
	Batches       []Batch `json:"batches"`
	NextPageToken string  `json:"nextPageToken"`
}

// Batch represents a single batch job.
type Batch struct {
	Name       string `json:"name"`
	UUID       string `json:"uuid"`
	State      string `json:"state"`
	Creator    string `json:"creator"`
	CreateTime string `json:"createTime"`
	Operation  string `json:"operation"`
	ConsoleURL string `json:"consoleUrl"`
	LogsURL    string `json:"logsUrl"`
}

func (s *Source) ListBatches(ctx context.Context, ps *int, pt, filter string) (any, error) {
	client := s.GetBatchControllerClient()
	parent := fmt.Sprintf("projects/%s/locations/%s", s.GetProject(), s.GetLocation())
	req := &dataprocpb.ListBatchesRequest{
		Parent:  parent,
		OrderBy: "create_time desc",
	}

	if ps != nil {
		req.PageSize = int32(*ps)
	}
	if pt != "" {
		req.PageToken = pt
	}
	if filter != "" {
		req.Filter = filter
	}

	it := client.ListBatches(ctx, req)
	pager := iterator.NewPager(it, int(req.PageSize), req.PageToken)

	var batchPbs []*dataprocpb.Batch
	nextPageToken, err := pager.NextPage(&batchPbs)
	if err != nil {
		return nil, fmt.Errorf("failed to list batches: %w", err)
	}

	batches, err := ToBatches(batchPbs)
	if err != nil {
		return nil, err
	}

	return ListBatchesResponse{Batches: batches, NextPageToken: nextPageToken}, nil
}

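ListBatches surfaces the Dataproc pager directly: the page size and the token from the previous response drive iterator.NewPager, and the returned NextPageToken feeds the next call. A hypothetical paging loop, assuming a Source value src and a context ctx are in scope (the filter string is illustrative, not a verified Dataproc filter expression):

// Hypothetical two-page walk over ListBatches from an external caller.
pageSize := 25
pageToken := ""
for {
	resp, err := src.ListBatches(ctx, &pageSize, pageToken, `state = "SUCCEEDED"`)
	if err != nil {
		return err
	}
	page := resp.(serverlessspark.ListBatchesResponse)
	for _, b := range page.Batches {
		fmt.Println(b.Name, b.State)
	}
	if page.NextPageToken == "" {
		break // no more pages
	}
	pageToken = page.NextPageToken
}
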
// ToBatches converts a slice of protobuf Batch messages to a slice of Batch structs.
func ToBatches(batchPbs []*dataprocpb.Batch) ([]Batch, error) {
	batches := make([]Batch, 0, len(batchPbs))
	for _, batchPb := range batchPbs {
		consoleUrl, err := BatchConsoleURLFromProto(batchPb)
		if err != nil {
			return nil, fmt.Errorf("error generating console url: %v", err)
		}
		logsUrl, err := BatchLogsURLFromProto(batchPb)
		if err != nil {
			return nil, fmt.Errorf("error generating logs url: %v", err)
		}
		batch := Batch{
			Name:       batchPb.Name,
			UUID:       batchPb.Uuid,
			State:      batchPb.State.Enum().String(),
			Creator:    batchPb.Creator,
			CreateTime: batchPb.CreateTime.AsTime().Format(time.RFC3339),
			Operation:  batchPb.Operation,
			ConsoleURL: consoleUrl,
			LogsURL:    logsUrl,
		}
		batches = append(batches, batch)
	}
	return batches, nil
}

func (s *Source) GetBatch(ctx context.Context, name string) (map[string]any, error) {
	client := s.GetBatchControllerClient()
	req := &dataprocpb.GetBatchRequest{
		Name: fmt.Sprintf("projects/%s/locations/%s/batches/%s", s.GetProject(), s.GetLocation(), name),
	}

	batchPb, err := client.GetBatch(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to get batch: %w", err)
	}

	jsonBytes, err := protojson.Marshal(batchPb)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal batch to JSON: %w", err)
	}

	var result map[string]any
	if err := json.Unmarshal(jsonBytes, &result); err != nil {
		return nil, fmt.Errorf("failed to unmarshal batch JSON: %w", err)
	}

	consoleUrl, err := BatchConsoleURLFromProto(batchPb)
	if err != nil {
		return nil, fmt.Errorf("error generating console url: %v", err)
	}
	logsUrl, err := BatchLogsURLFromProto(batchPb)
	if err != nil {
		return nil, fmt.Errorf("error generating logs url: %v", err)
	}

	wrappedResult := map[string]any{
		"consoleUrl": consoleUrl,
		"logsUrl":    logsUrl,
		"batch":      result,
	}

	return wrappedResult, nil
}

@@ -1,10 +1,10 @@
// Copyright 2025 Google LLC
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

package common
package serverlessspark

import (
	"fmt"
@@ -23,13 +23,13 @@ import (
	"cloud.google.com/go/dataproc/v2/apiv1/dataprocpb"
)

var batchFullNameRegex = regexp.MustCompile(`projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)/batches/(?P<batch_id>[^/]+)`)

const (
	logTimeBufferBefore = 1 * time.Minute
	logTimeBufferAfter  = 10 * time.Minute
)

var batchFullNameRegex = regexp.MustCompile(`projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)/batches/(?P<batch_id>[^/]+)`)

// ExtractBatchDetails extracts the project ID, location, and batch ID from a fully qualified batch name.
func ExtractBatchDetails(batchName string) (projectID, location, batchID string, err error) {
	matches := batchFullNameRegex.FindStringSubmatch(batchName)
@@ -39,26 +39,6 @@ func ExtractBatchDetails(batchName string) (projectID, location, batchID string,
	return matches[1], matches[2], matches[3], nil
}

// BatchConsoleURLFromProto builds a URL to the Google Cloud Console linking to the batch summary page.
func BatchConsoleURLFromProto(batchPb *dataprocpb.Batch) (string, error) {
	projectID, location, batchID, err := ExtractBatchDetails(batchPb.GetName())
	if err != nil {
		return "", err
	}
	return BatchConsoleURL(projectID, location, batchID), nil
}

// BatchLogsURLFromProto builds a URL to the Google Cloud Console showing Cloud Logging for the given batch and time range.
func BatchLogsURLFromProto(batchPb *dataprocpb.Batch) (string, error) {
	projectID, location, batchID, err := ExtractBatchDetails(batchPb.GetName())
	if err != nil {
		return "", err
	}
	createTime := batchPb.GetCreateTime().AsTime()
	stateTime := batchPb.GetStateTime().AsTime()
	return BatchLogsURL(projectID, location, batchID, createTime, stateTime), nil
}

// BatchConsoleURL builds a URL to the Google Cloud Console linking to the batch summary page.
func BatchConsoleURL(projectID, location, batchID string) string {
	return fmt.Sprintf("https://console.cloud.google.com/dataproc/batches/%s/%s/summary?project=%s", location, batchID, projectID)
@@ -89,3 +69,23 @@ resource.labels.batch_id="%s"`

	return "https://console.cloud.google.com/logs/viewer?" + v.Encode()
}

// BatchConsoleURLFromProto builds a URL to the Google Cloud Console linking to the batch summary page.
func BatchConsoleURLFromProto(batchPb *dataprocpb.Batch) (string, error) {
	projectID, location, batchID, err := ExtractBatchDetails(batchPb.GetName())
	if err != nil {
		return "", err
	}
	return BatchConsoleURL(projectID, location, batchID), nil
}

// BatchLogsURLFromProto builds a URL to the Google Cloud Console showing Cloud Logging for the given batch and time range.
func BatchLogsURLFromProto(batchPb *dataprocpb.Batch) (string, error) {
	projectID, location, batchID, err := ExtractBatchDetails(batchPb.GetName())
	if err != nil {
		return "", err
	}
	createTime := batchPb.GetCreateTime().AsTime()
	stateTime := batchPb.GetStateTime().AsTime()
	return BatchLogsURL(projectID, location, batchID, createTime, stateTime), nil
}

@@ -1,10 +1,10 @@
// Copyright 2025 Google LLC
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,19 +12,20 @@
// See the License for the specific language governing permissions and
// limitations under the License.

package common
package serverlessspark_test

import (
	"testing"
	"time"

	"cloud.google.com/go/dataproc/v2/apiv1/dataprocpb"
	"github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
	"google.golang.org/protobuf/types/known/timestamppb"
)

func TestExtractBatchDetails_Success(t *testing.T) {
	batchName := "projects/my-project/locations/us-central1/batches/my-batch"
	projectID, location, batchID, err := ExtractBatchDetails(batchName)
	projectID, location, batchID, err := serverlessspark.ExtractBatchDetails(batchName)
	if err != nil {
		t.Errorf("ExtractBatchDetails() error = %v, want no error", err)
		return
@@ -45,7 +46,7 @@ func TestExtractBatchDetails_Success(t *testing.T) {

func TestExtractBatchDetails_Failure(t *testing.T) {
	batchName := "invalid-name"
	_, _, _, err := ExtractBatchDetails(batchName)
	_, _, _, err := serverlessspark.ExtractBatchDetails(batchName)
	wantErr := "failed to parse batch name: invalid-name"
	if err == nil || err.Error() != wantErr {
		t.Errorf("ExtractBatchDetails() error = %v, want %v", err, wantErr)
@@ -53,7 +54,7 @@ func TestExtractBatchDetails_Failure(t *testing.T) {
}

func TestBatchConsoleURL(t *testing.T) {
	got := BatchConsoleURL("my-project", "us-central1", "my-batch")
	got := serverlessspark.BatchConsoleURL("my-project", "us-central1", "my-batch")
	want := "https://console.cloud.google.com/dataproc/batches/us-central1/my-batch/summary?project=my-project"
	if got != want {
		t.Errorf("BatchConsoleURL() = %v, want %v", got, want)
@@ -63,7 +64,7 @@ func TestBatchConsoleURL(t *testing.T) {
func TestBatchLogsURL(t *testing.T) {
	startTime := time.Date(2025, 10, 1, 5, 0, 0, 0, time.UTC)
	endTime := time.Date(2025, 10, 1, 6, 0, 0, 0, time.UTC)
	got := BatchLogsURL("my-project", "us-central1", "my-batch", startTime, endTime)
	got := serverlessspark.BatchLogsURL("my-project", "us-central1", "my-batch", startTime, endTime)
	want := "https://console.cloud.google.com/logs/viewer?advancedFilter=" +
		"resource.type%3D%22cloud_dataproc_batch%22" +
		"%0Aresource.labels.project_id%3D%22my-project%22" +
@@ -82,7 +83,7 @@ func TestBatchConsoleURLFromProto(t *testing.T) {
	batchPb := &dataprocpb.Batch{
		Name: "projects/my-project/locations/us-central1/batches/my-batch",
	}
	got, err := BatchConsoleURLFromProto(batchPb)
	got, err := serverlessspark.BatchConsoleURLFromProto(batchPb)
	if err != nil {
		t.Fatalf("BatchConsoleURLFromProto() error = %v", err)
	}
@@ -100,7 +101,7 @@ func TestBatchLogsURLFromProto(t *testing.T) {
		CreateTime: timestamppb.New(createTime),
		StateTime:  timestamppb.New(stateTime),
	}
	got, err := BatchLogsURLFromProto(batchPb)
	got, err := serverlessspark.BatchLogsURLFromProto(batchPb)
	if err != nil {
		t.Fatalf("BatchLogsURLFromProto() error = %v", err)
	}
159
internal/sources/snowflake/snowflake.go
Normal file
@@ -0,0 +1,159 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package snowflake

import (
	"context"
	"fmt"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/jmoiron/sqlx"
	_ "github.com/snowflakedb/gosnowflake"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "snowflake"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name      string `yaml:"name" validate:"required"`
	Kind      string `yaml:"kind" validate:"required"`
	Account   string `yaml:"account" validate:"required"`
	User      string `yaml:"user" validate:"required"`
	Password  string `yaml:"password" validate:"required"`
	Database  string `yaml:"database" validate:"required"`
	Schema    string `yaml:"schema" validate:"required"`
	Warehouse string `yaml:"warehouse"`
	Role      string `yaml:"role"`
}

func (r Config) SourceConfigKind() string {
	return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	db, err := initSnowflakeConnection(ctx, tracer, r.Name, r.Account, r.User, r.Password, r.Database, r.Schema, r.Warehouse, r.Role)
	if err != nil {
		return nil, fmt.Errorf("unable to create connection: %w", err)
	}

	err = db.PingContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to connect successfully: %w", err)
	}

	s := &Source{
		Config: r,
		DB:     db,
	}
	return s, nil
}

var _ sources.Source = &Source{}

type Source struct {
	Config
	DB *sqlx.DB
}

func (s *Source) SourceKind() string {
	return SourceKind
}

func (s *Source) ToConfig() sources.SourceConfig {
	return s.Config
}

func (s *Source) SnowflakeDB() *sqlx.DB {
	return s.DB
}

func (s *Source) RunSQL(ctx context.Context, statement string, params []any) (any, error) {
	rows, err := s.DB.QueryxContext(ctx, statement, params...)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	defer rows.Close()

	var out []any
	for rows.Next() {
		cols, err := rows.Columns()
		if err != nil {
			return nil, fmt.Errorf("unable to get columns: %w", err)
		}

		values := make([]interface{}, len(cols))
		valuePtrs := make([]interface{}, len(cols))
		for i := range values {
			valuePtrs[i] = &values[i]
		}

		if err := rows.Scan(valuePtrs...); err != nil {
			return nil, fmt.Errorf("unable to scan row: %w", err)
		}

		vMap := make(map[string]any)
		for i, col := range cols {
			vMap[col] = values[i]
		}
		out = append(out, vMap)
	}

	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("row iteration error: %w", err)
	}

	return out, nil
}

func initSnowflakeConnection(ctx context.Context, tracer trace.Tracer, name, account, user, password, database, schema, warehouse, role string) (*sqlx.DB, error) {
	//nolint:all // Reassigned ctx
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	// Set defaults for optional parameters
	if warehouse == "" {
		warehouse = "COMPUTE_WH"
	}
	if role == "" {
		role = "ACCOUNTADMIN"
	}

	// Snowflake DSN format: user:password@account/database/schema?warehouse=warehouse&role=role
	dsn := fmt.Sprintf("%s:%s@%s/%s/%s?warehouse=%s&role=%s", user, password, account, database, schema, warehouse, role)
	db, err := sqlx.ConnectContext(ctx, "snowflake", dsn)
	if err != nil {
		return nil, fmt.Errorf("unable to create connection: %w", err)
	}

	return db, nil
}
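The DSN comment above captures the whole connection recipe; a minimal standalone sketch of the same string construction with sample placeholder values (the real code then opens the pool via sqlx.ConnectContext with the gosnowflake driver, as shown above):

package main

import "fmt"

func main() {
	// Mirrors initSnowflakeConnection's DSN construction:
	// user:password@account/database/schema?warehouse=warehouse&role=role
	user, password, account := "my_user", "my_pass", "my-account" // sample values
	database, schema := "my_db", "PUBLIC"
	warehouse, role := "COMPUTE_WH", "ACCOUNTADMIN" // defaults applied when unset
	dsn := fmt.Sprintf("%s:%s@%s/%s/%s?warehouse=%s&role=%s",
		user, password, account, database, schema, warehouse, role)
	fmt.Println(dsn) // my_user:my_pass@my-account/my_db/PUBLIC?warehouse=COMPUTE_WH&role=ACCOUNTADMIN
}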
129
internal/sources/snowflake/snowflake_test.go
Normal file
@@ -0,0 +1,129 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package snowflake_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources/snowflake"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlSnowflake(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-snowflake-instance:
					kind: snowflake
					account: my-account
					user: my_user
					password: my_pass
					database: my_db
					schema: my_schema
			`,
			want: server.SourceConfigs{
				"my-snowflake-instance": snowflake.Config{
					Name:      "my-snowflake-instance",
					Kind:      snowflake.SourceKind,
					Account:   "my-account",
					User:      "my_user",
					Password:  "my_pass",
					Database:  "my_db",
					Schema:    "my_schema",
					Warehouse: "",
					Role:      "",
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}
}

func TestFailParseFromYaml(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-snowflake-instance:
					kind: snowflake
					account: my-account
					user: my_user
					password: my_pass
					database: my_db
					schema: my_schema
					foo: bar
			`,
			err: "unable to parse source \"my-snowflake-instance\" as \"snowflake\": [3:1] unknown field \"foo\"\n 1 | account: my-account\n 2 | database: my_db\n> 3 | foo: bar\n ^\n 4 | kind: snowflake\n 5 | password: my_pass\n 6 | schema: my_schema\n 7 | ",
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-snowflake-instance:
					kind: snowflake
					account: my-account
					user: my_user
					password: my_pass
					database: my_db
			`,
			err: "unable to parse source \"my-snowflake-instance\" as \"snowflake\": Key: 'Config.Schema' Error:Field validation for 'Schema' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}
@@ -16,14 +16,17 @@ package trino

import (
	"context"
	"crypto/tls"
	"database/sql"
	"fmt"
	"net/http"
	"net/url"
	"time"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	_ "github.com/trinodb/trino-go-client/trino"
	"github.com/googleapis/genai-toolbox/internal/util"
	trinogo "github.com/trinodb/trino-go-client/trino"
	"go.opentelemetry.io/otel/trace"
)

@@ -47,18 +50,21 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
}

type Config struct {
	Name            string `yaml:"name" validate:"required"`
	Kind            string `yaml:"kind" validate:"required"`
	Host            string `yaml:"host" validate:"required"`
	Port            string `yaml:"port" validate:"required"`
	User            string `yaml:"user"`
	Password        string `yaml:"password"`
	Catalog         string `yaml:"catalog" validate:"required"`
	Schema          string `yaml:"schema" validate:"required"`
	QueryTimeout    string `yaml:"queryTimeout"`
	AccessToken     string `yaml:"accessToken"`
	KerberosEnabled bool   `yaml:"kerberosEnabled"`
	SSLEnabled      bool   `yaml:"sslEnabled"`
	Name                   string `yaml:"name" validate:"required"`
	Kind                   string `yaml:"kind" validate:"required"`
	Host                   string `yaml:"host" validate:"required"`
	Port                   string `yaml:"port" validate:"required"`
	User                   string `yaml:"user"`
	Password               string `yaml:"password"`
	Catalog                string `yaml:"catalog" validate:"required"`
	Schema                 string `yaml:"schema" validate:"required"`
	QueryTimeout           string `yaml:"queryTimeout"`
	AccessToken            string `yaml:"accessToken"`
	KerberosEnabled        bool   `yaml:"kerberosEnabled"`
	SSLEnabled             bool   `yaml:"sslEnabled"`
	SSLCertPath            string `yaml:"sslCertPath"`
	SSLCert                string `yaml:"sslCert"`
	DisableSslVerification bool   `yaml:"disableSslVerification"`
}

func (r Config) SourceConfigKind() string {
@@ -66,7 +72,7 @@ func (r Config) SourceConfigKind() string {
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	pool, err := initTrinoConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Catalog, r.Schema, r.QueryTimeout, r.AccessToken, r.KerberosEnabled, r.SSLEnabled)
	pool, err := initTrinoConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Catalog, r.Schema, r.QueryTimeout, r.AccessToken, r.KerberosEnabled, r.SSLEnabled, r.SSLCertPath, r.SSLCert, r.DisableSslVerification)
	if err != nil {
		return nil, fmt.Errorf("unable to create pool: %w", err)
	}
@@ -152,17 +158,35 @@ func (s *Source) RunSQL(ctx context.Context, statement string, params []any) (an
	return out, nil
}

func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool) (*sql.DB, error) {
func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool, sslCertPath, sslCert string, disableSslVerification bool) (*sql.DB, error) {
	//nolint:all // Reassigned ctx
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	// Build Trino DSN
	dsn, err := buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken, kerberosEnabled, sslEnabled)
	dsn, err := buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken, kerberosEnabled, sslEnabled, sslCertPath, sslCert)
	if err != nil {
		return nil, fmt.Errorf("failed to build DSN: %w", err)
	}

	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}

	if disableSslVerification {
		logger.WarnContext(ctx, "SSL verification is disabled for trino source %s. This is an insecure setting and should not be used in production.\n", name)
		tr := &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		}
		client := &http.Client{Transport: tr}
		clientName := fmt.Sprintf("insecure_trino_client_%s", name)
		if err := trinogo.RegisterCustomClient(clientName, client); err != nil {
			return nil, fmt.Errorf("failed to register custom client: %w", err)
		}
		dsn = fmt.Sprintf("%s&custom_client=%s", dsn, clientName)
	}

	db, err := sql.Open("trino", dsn)
	if err != nil {
		return nil, fmt.Errorf("failed to open connection: %w", err)
@@ -176,7 +200,7 @@ func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, hos
	return db, nil
}

func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool) (string, error) {
func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool, sslCertPath, sslCert string) (string, error) {
	// Build query parameters
	query := url.Values{}
	query.Set("catalog", catalog)
@@ -190,6 +214,12 @@ func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, ac
	if kerberosEnabled {
		query.Set("KerberosEnabled", "true")
	}
	if sslCertPath != "" {
		query.Set("sslCertPath", sslCertPath)
	}
	if sslCert != "" {
		query.Set("sslCert", sslCert)
	}

	// Build URL
	scheme := "http"
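The new sslCertPath and sslCert options flow into the DSN as ordinary URL query parameters; a minimal standalone sketch of that encoding with sample values, mirroring buildTrinoDSN's use of net/url rather than calling it:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Mirrors the query-parameter assembly in buildTrinoDSN above.
	query := url.Values{}
	query.Set("catalog", "hive")
	query.Set("schema", "default")
	query.Set("sslCertPath", "/path/to/cert.pem")

	u := url.URL{
		Scheme:   "https", // sslEnabled switches the scheme from http to https
		User:     url.User("testuser"),
		Host:     "localhost:8443",
		RawQuery: query.Encode(), // keys are encoded in sorted order
	}
	fmt.Println(u.String())
	// https://testuser@localhost:8443?catalog=hive&schema=default&sslCertPath=%2Fpath%2Fto%2Fcert.pem
}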
@@ -36,6 +36,8 @@ func TestBuildTrinoDSN(t *testing.T) {
		accessToken     string
		kerberosEnabled bool
		sslEnabled      bool
		sslCertPath     string
		sslCert         string
		want            string
		wantErr         bool
	}{
@@ -49,6 +51,19 @@
			want:    "http://testuser@localhost:8080?catalog=hive&schema=default",
			wantErr: false,
		},
		{
			name:        "with SSL cert path and cert",
			host:        "localhost",
			port:        "8443",
			user:        "testuser",
			catalog:     "hive",
			schema:      "default",
			sslEnabled:  true,
			sslCertPath: "/path/to/cert.pem",
			sslCert:     "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n",
			want:        "https://testuser@localhost:8443?catalog=hive&schema=default&sslCert=-----BEGIN+CERTIFICATE-----%0A...%0A-----END+CERTIFICATE-----%0A&sslCertPath=%2Fpath%2Fto%2Fcert.pem",
			wantErr:     false,
		},
		{
			name: "with password",
			host: "localhost",
@@ -117,7 +132,7 @@ func TestBuildTrinoDSN(t *testing.T) {

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := buildTrinoDSN(tt.host, tt.port, tt.user, tt.password, tt.catalog, tt.schema, tt.queryTimeout, tt.accessToken, tt.kerberosEnabled, tt.sslEnabled)
			got, err := buildTrinoDSN(tt.host, tt.port, tt.user, tt.password, tt.catalog, tt.schema, tt.queryTimeout, tt.accessToken, tt.kerberosEnabled, tt.sslEnabled, tt.sslCertPath, tt.sslCert)
			if (err != nil) != tt.wantErr {
				t.Errorf("buildTrinoDSN() error = %v, wantErr %v", err, tt.wantErr)
				return
@@ -215,6 +230,41 @@ func TestParseFromYamlTrino(t *testing.T) {
				},
			},
		},
		{
			desc: "example with SSL cert path and cert",
			in: `
			sources:
				my-trino-ssl-cert:
					kind: trino
					host: localhost
					port: "8443"
					user: testuser
					catalog: hive
					schema: default
					sslEnabled: true
					sslCertPath: /path/to/cert.pem
					sslCert: |-
						-----BEGIN CERTIFICATE-----
						...
						-----END CERTIFICATE-----
					disableSslVerification: true
			`,
			want: server.SourceConfigs{
				"my-trino-ssl-cert": Config{
					Name:                   "my-trino-ssl-cert",
					Kind:                   SourceKind,
					Host:                   "localhost",
					Port:                   "8443",
					User:                   "testuser",
					Catalog:                "hive",
					Schema:                 "default",
					SSLEnabled:             true,
					SSLCertPath:            "/path/to/cert.pem",
					SSLCert:                "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
					DisableSslVerification: true,
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
@@ -77,26 +77,21 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		placeholderParts = append(placeholderParts, fmt.Sprintf("$%d", i+3)) // $1, $2 reserved
	}

	var paramNamesSQL string
	var paramValuesSQL string

	var stmt string
	if numParams > 0 {
		paramNamesSQL = fmt.Sprintf("ARRAY[%s]", strings.Join(quotedNameParts, ", "))
		paramValuesSQL = fmt.Sprintf("ARRAY[%s]", strings.Join(placeholderParts, ", "))
	paramNamesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(quotedNameParts, ", "))
	paramValuesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(placeholderParts, ", "))
		// execute_nl_query is the AlloyDB AI function that executes the natural language query
		// The first parameter is the natural language query, which is passed as $1
		// The second parameter is the NLConfig, which is passed as a $2
		// The following params are the list of PSV values passed to the NLConfig
		// Example SQL statement being executed:
		// SELECT alloydb_ai_nl.execute_nl_query(nl_question => 'How many tickets do I have?', nl_config_id => 'cymbal_air_nl_config', param_names => ARRAY ['user_email'], param_values => ARRAY ['hailongli@google.com']);
		stmtFormat := "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => %s, param_values => %s);"
		stmt = fmt.Sprintf(stmtFormat, paramNamesSQL, paramValuesSQL)
	} else {
		paramNamesSQL = "ARRAY[]::TEXT[]"
		paramValuesSQL = "ARRAY[]::TEXT[]"
		stmt = "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2);"
	}

	// execute_nl_query is the AlloyDB AI function that executes the natural language query
	// The first parameter is the natural language query, which is passed as $1
	// The second parameter is the NLConfig, which is passed as a $2
	// The following params are the list of PSV values passed to the NLConfig
	// Example SQL statement being executed:
	// SELECT alloydb_ai_nl.execute_nl_query(nl_question => 'How many tickets do I have?', nl_config_id => 'cymbal_air_nl_config', param_names => ARRAY ['user_email'], param_values => ARRAY ['hailongli@google.com']);
	stmtFormat := "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => %s, param_values => %s);"
	stmt := fmt.Sprintf(stmtFormat, paramNamesSQL, paramValuesSQL)

	newQuestionParam := parameters.NewStringParameter(
		"question", // name
		"The natural language question to ask.", // description
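The refactor above collapses the old two-branch construction into a single statement format that always passes param_names and param_values; a standalone sketch of the string assembly it performs for a single PSV parameter (the parameter name "user_email" is illustrative, taken from the example comment in the diff):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Mirrors the statement construction above for one PSV parameter.
	quotedNameParts := []string{"'user_email'"}
	placeholderParts := []string{"$3"} // $1, $2 are reserved for the question and the NLConfig
	paramNamesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(quotedNameParts, ", "))
	paramValuesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(placeholderParts, ", "))
	stmtFormat := "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => %s, param_values => %s);"
	fmt.Printf(stmtFormat+"\n", paramNamesSQL, paramValuesSQL)
	// SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => ARRAY['user_email'], param_values => ARRAY[$3]);
}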
@@ -28,6 +28,21 @@ import (

const kind string = "cloud-gemini-data-analytics-query"

// Guidance is the tool guidance string.
const Guidance = `Tool guidance:
Inputs:
1. query: A natural language formulation of a database query.
Outputs: (all optional)
1. disambiguation_question: Clarification questions or comments where the tool needs the users' input.
2. generated_query: The generated query for the user query.
3. intent_explanation: An explanation for why the tool produced ` + "`generated_query`" + `.
4. query_result: The result of executing ` + "`generated_query`" + `.
5. natural_language_answer: The natural language answer that summarizes the ` + "`query`" + ` and ` + "`query_result`" + `.

Usage guidance:
1. If ` + "`disambiguation_question`" + ` is produced, then solicit the needed inputs from the user and try the tool with a new ` + "`query`" + ` that has the needed clarification.
2. If ` + "`natural_language_answer`" + ` is produced, use ` + "`intent_explanation`" + ` and ` + "`generated_query`" + ` to see if you need to clarify any assumptions for the user.`

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
@@ -68,11 +83,18 @@ func (cfg Config) ToolConfigKind() string {

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// Define the parameters for the Gemini Data Analytics Query API
	// The prompt is the only input parameter.
	// The query is the only input parameter.
	allParameters := parameters.Parameters{
		parameters.NewStringParameterWithRequired("prompt", "The natural language question to ask.", true),
		parameters.NewStringParameterWithRequired("query", "A natural language formulation of a database query.", true),
	}
	// The input and outputs are for tool guidance, usage guidance is for multi-turn interaction.
	guidance := Guidance

	if cfg.Description != "" {
		cfg.Description += "\n\n" + guidance
	} else {
		cfg.Description = guidance
	}
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil)

	return Tool{
@@ -105,9 +127,9 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
	}

	paramsMap := params.AsMap()
	prompt, ok := paramsMap["prompt"].(string)
	query, ok := paramsMap["query"].(string)
	if !ok {
		return nil, fmt.Errorf("prompt parameter not found or not a string")
		return nil, fmt.Errorf("query parameter not found or not a string")
	}

	// Parse the access token if provided
@@ -125,7 +147,7 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para

	payload := &QueryDataRequest{
		Parent:            payloadParent,
		Prompt:            prompt,
		Prompt:            query,
		Context:           t.Context,
		GenerationOptions: t.GenerationOptions,
	}

@@ -328,9 +328,9 @@ func TestInvoke(t *testing.T) {
		t.Fatalf("failed to initialize tool: %v", err)
	}

	// Prepare parameters for invocation - ONLY prompt
	// Prepare parameters for invocation - ONLY query
	params := parameters.ParamValues{
		{Name: "prompt", Value: "How many accounts who have region in Prague are eligible for loans?"},
		{Name: "query", Value: "How many accounts who have region in Prague are eligible for loans?"},
	}

	resourceMgr := resources.NewResourceManager(srcs, nil, nil, nil, nil, nil, nil)
@@ -16,22 +16,13 @@ package fhirfetchpage

import (
	"context"
	"encoding/json"
	"fmt"
	"io"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
	"github.com/googleapis/genai-toolbox/internal/sources"
	healthcareds "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/util/parameters"
	"google.golang.org/api/healthcare/v1"

	"net/http"

	"golang.org/x/oauth2"
	"golang.org/x/oauth2/google"
)

const kind string = "cloud-healthcare-fhir-fetch-page"
@@ -54,13 +45,8 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
}

type compatibleSource interface {
	Project() string
	Region() string
	DatasetID() string
	AllowedFHIRStores() map[string]struct{}
	Service() *healthcare.Service
	ServiceCreator() healthcareds.HealthcareServiceCreator
	UseClientAuthorization() bool
	FHIRFetchPage(context.Context, string, string) (any, error)
}

type Config struct {
@@ -117,49 +103,14 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
	if !ok {
		return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", pageURLKey)
	}

	var httpClient *http.Client
	var tokenStr string
	if source.UseClientAuthorization() {
		tokenStr, err := accessToken.ParseBearerToken()
		tokenStr, err = accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: tokenStr})
		httpClient = oauth2.NewClient(ctx, ts)
	} else {
		// The source.Service() object holds a client with the default credentials.
		// However, the client is not exported, so we have to create a new one.
		var err error
		httpClient, err = google.DefaultClient(ctx, healthcare.CloudHealthcareScope)
		if err != nil {
			return nil, fmt.Errorf("failed to create default http client: %w", err)
		}
	}

	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create http request: %w", err)
	}
	req.Header.Set("Accept", "application/fhir+json;charset=utf-8")

	resp, err := httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("failed to get fhir page from %q: %w", url, err)
	}
	defer resp.Body.Close()

	respBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("could not read response: %w", err)
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("read: status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
	}
	var jsonMap map[string]interface{}
	if err := json.Unmarshal([]byte(string(respBytes)), &jsonMap); err != nil {
		return nil, fmt.Errorf("could not unmarshal response as json: %w", err)
	}
	return jsonMap, nil
	return source.FHIRFetchPage(ctx, url, tokenStr)
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
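For context, the inline logic deleted above amounts to an authenticated GET with a FHIR JSON Accept header, which the source's FHIRFetchPage method presumably encapsulates now; a standalone sketch of that request shape (the client wiring and page URL are placeholders, not the tool's actual API):

package main

import (
	"fmt"
	"io"
	"net/http"
)

// fetchPage issues the same kind of request the old inline code made:
// GET the page URL with a FHIR JSON Accept header and surface non-2xx bodies.
func fetchPage(client *http.Client, url string) ([]byte, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Accept", "application/fhir+json;charset=utf-8")
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("read: status %d: %s", resp.StatusCode, body)
	}
	return body, nil
}

func main() {
	_ = fetchPage // wiring a real FHIR store URL requires credentials; see the diff above
}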
@@ -16,20 +16,16 @@ package fhirpatienteverything

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"strings"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
	"github.com/googleapis/genai-toolbox/internal/sources"
	healthcareds "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/common"
	"github.com/googleapis/genai-toolbox/internal/util/parameters"
	"google.golang.org/api/googleapi"
	"google.golang.org/api/healthcare/v1"
)

const kind string = "cloud-healthcare-fhir-patient-everything"
@@ -54,13 +50,9 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
}

type compatibleSource interface {
	Project() string
	Region() string
	DatasetID() string
	AllowedFHIRStores() map[string]struct{}
	Service() *healthcare.Service
	ServiceCreator() healthcareds.HealthcareServiceCreator
	UseClientAuthorization() bool
	FHIRPatientEverything(string, string, string, []googleapi.CallOption) (any, error)
}

type Config struct {
@@ -139,20 +131,14 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
		return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", patientIDKey)
	}

	svc := source.Service()
	// Initialize new service if using user OAuth token
	var tokenStr string
	if source.UseClientAuthorization() {
		tokenStr, err := accessToken.ParseBearerToken()
		tokenStr, err = accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		svc, err = source.ServiceCreator()(tokenStr)
		if err != nil {
			return nil, fmt.Errorf("error creating service from OAuth access token: %w", err)
		}
	}

	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s/fhir/Patient/%s", source.Project(), source.Region(), source.DatasetID(), storeID, patientID)
	var opts []googleapi.CallOption
	if val, ok := params.AsMap()[typeFilterKey]; ok {
		types, ok := val.([]any)
@@ -176,25 +162,7 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
			opts = append(opts, googleapi.QueryParameter("_since", sinceStr))
		}
	}

	resp, err := svc.Projects.Locations.Datasets.FhirStores.Fhir.PatientEverything(name).Do(opts...)
	if err != nil {
		return nil, fmt.Errorf("failed to call patient everything for %q: %w", name, err)
	}
	defer resp.Body.Close()

	respBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("could not read response: %w", err)
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("patient-everything: status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
	}
	var jsonMap map[string]interface{}
	if err := json.Unmarshal([]byte(string(respBytes)), &jsonMap); err != nil {
		return nil, fmt.Errorf("could not unmarshal response as json: %w", err)
	}
	return jsonMap, nil
	return source.FHIRPatientEverything(storeID, patientID, tokenStr, opts)
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
@@ -16,20 +16,16 @@ package fhirpatientsearch

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"strings"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
	"github.com/googleapis/genai-toolbox/internal/sources"
	healthcareds "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/common"
	"github.com/googleapis/genai-toolbox/internal/util/parameters"
	"google.golang.org/api/googleapi"
	"google.golang.org/api/healthcare/v1"
)

const kind string = "cloud-healthcare-fhir-patient-search"
@@ -70,13 +66,9 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
}

type compatibleSource interface {
	Project() string
	Region() string
	DatasetID() string
	AllowedFHIRStores() map[string]struct{}
	Service() *healthcare.Service
	ServiceCreator() healthcareds.HealthcareServiceCreator
	UseClientAuthorization() bool
	FHIRPatientSearch(string, string, []googleapi.CallOption) (any, error)
}

type Config struct {
@@ -169,17 +161,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
		return nil, err
	}

	svc := source.Service()
	// Initialize new service if using user OAuth token
	var tokenStr string
	if source.UseClientAuthorization() {
		tokenStr, err := accessToken.ParseBearerToken()
		tokenStr, err = accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		svc, err = source.ServiceCreator()(tokenStr)
		if err != nil {
			return nil, fmt.Errorf("error creating service from OAuth access token: %w", err)
		}
	}

	var summary bool
@@ -248,26 +235,7 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
	if summary {
		opts = append(opts, googleapi.QueryParameter("_summary", "text"))
	}

	name := fmt.Sprintf("projects/%s/locations/%s/datasets/%s/fhirStores/%s", source.Project(), source.Region(), source.DatasetID(), storeID)
	resp, err := svc.Projects.Locations.Datasets.FhirStores.Fhir.SearchType(name, "Patient", &healthcare.SearchResourcesRequest{ResourceType: "Patient"}).Do(opts...)
	if err != nil {
		return nil, fmt.Errorf("failed to search patient resources: %w", err)
	}
	defer resp.Body.Close()

	respBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("could not read response: %w", err)
	}
	if resp.StatusCode > 299 {
		return nil, fmt.Errorf("search: status %d %s: %s", resp.StatusCode, resp.Status, respBytes)
	}
	var jsonMap map[string]interface{}
	if err := json.Unmarshal([]byte(string(respBytes)), &jsonMap); err != nil {
		return nil, fmt.Errorf("could not unmarshal response as json: %w", err)
	}
	return jsonMap, nil
	return source.FHIRPatientSearch(storeID, tokenStr, opts)
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
Some files were not shown because too many files have changed in this diff.