Mirror of https://github.com/googleapis/genai-toolbox.git
Synced 2026-01-11 08:28:11 -05:00

Compare commits: integratio ... binary-npx (55 commits)
| SHA1 |
|---|
| 57a2ade4c5 |
| bcb3566ba3 |
| 47bbef10c5 |
| 44e782371e |
| 705be4018d |
| c968ed8925 |
| 6f5911288d |
| cf42389d39 |
| 49cb83ca5d |
| 961c4726c7 |
| 6e961ad48b |
| d5a9031a65 |
| 669b9b3b5d |
| 7bf04ddbb1 |
| 2705cf642c |
| fb8f757eaf |
| 6731e25cff |
| 0aece20ada |
| 401d0307d5 |
| 7479f5b871 |
| a61a1d78a3 |
| 2fbf51968d |
| 54b56ee7cc |
| 39615a6011 |
| 83562eb5ec |
| bb76fc6a84 |
| 21e4db2ed4 |
| 05e075b3fe |
| e94eebc6af |
| e50dc777b7 |
| 1aaa84313c |
| 829e3168ff |
| 3d76f60401 |
| d065b41598 |
| 41b518b955 |
| 6e8a9eb8ec |
| ef8f3b02f2 |
| 351b007fe3 |
| 9d1feca108 |
| 17b41f6453 |
| a7799757c9 |
| d961e373e1 |
| bcb40a720d |
| 4a4cf1e712 |
| b706b5bc68 |
| 4d3332d37d |
| 1203b7370a |
| 4a26ce3c1b |
| 306b5becda |
| a4506009b9 |
| 17b70ccaa7 |
| 001d634de1 |
| 268700bdbf |
| eb793398cd |
| cf0fc515b5 |
@@ -825,7 +825,27 @@ steps:
             elasticsearch \
             elasticsearch

+  - id: "snowflake"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+      - "SNOWFLAKE_DATABASE=$_SNOWFLAKE_DATABASE"
+      - "SNOWFLAKE_SCHEMA=$_SNOWFLAKE_SCHEMA"
+    secretEnv: ["CLIENT_ID", "SNOWFLAKE_USER", "SNOWFLAKE_PASS", "SNOWFLAKE_ACCOUNT"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        .ci/test_with_coverage.sh \
+          "Snowflake" \
+          snowflake \
+          snowflake

   - id: "cassandra"
     name: golang:1
     waitFor: ["compile-test-binary"]

@@ -1038,6 +1058,12 @@ availableSecrets:
     env: ELASTICSEARCH_USER
   - versionName: projects/$PROJECT_ID/secrets/elastic_search_pass/versions/latest
     env: ELASTICSEARCH_PASS
+  - versionName: projects/$PROJECT_ID/secrets/snowflake_account/versions/latest
+    env: SNOWFLAKE_ACCOUNT
+  - versionName: projects/$PROJECT_ID/secrets/snowflake_user/versions/latest
+    env: SNOWFLAKE_USER
+  - versionName: projects/$PROJECT_ID/secrets/snowflake_pass/versions/latest
+    env: SNOWFLAKE_PASS
   - versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
     env: CASSANDRA_USER
   - versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest

@@ -1124,4 +1150,5 @@ substitutions:
   _SINGLESTORE_USER: "root"
   _MARIADB_PORT: "3307"
   _MARIADB_DATABASE: test_database
+  _SNOWFLAKE_DATABASE: "test"
+  _SNOWFLAKE_SCHEMA: "PUBLIC"
2  .gitignore (vendored)

@@ -20,4 +20,4 @@ node_modules

 # executable
 genai-toolbox
-toolbox
+toolbox
@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 # Add a new version block here before every release
 # The order of versions in this file is mirrored into the dropdown

+[[params.versions]]
+  version = "v0.25.0"
+  url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
+
 [[params.versions]]
   version = "v0.24.0"
   url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
25  CHANGELOG.md

@@ -1,5 +1,30 @@
 # Changelog

+## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
+
+### Features
+
+* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
+* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
+* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
+* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
+* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
+* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
+* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
+* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
+* **tools/postgressql:** Add tool to list store procedure ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
+* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))
+
+### Bug Fixes
+
+* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
+* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
+* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
+* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
+* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))
+
 ## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
14  README.md

@@ -140,7 +140,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 > chmod +x toolbox
 > ```

@@ -153,7 +153,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 > chmod +x toolbox
 > ```

@@ -166,7 +166,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.24.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 > chmod +x toolbox
 > ```

@@ -179,7 +179,7 @@ To install Toolbox as a binary:
 >
 > ```cmd
 > :: see releases page for other versions
-> set VERSION=0.24.0
+> set VERSION=0.25.0
 > curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
 > ```
 >

@@ -191,7 +191,7 @@ To install Toolbox as a binary:
 >
 > ```powershell
 > # see releases page for other versions
-> $VERSION = "0.24.0"
+> $VERSION = "0.25.0"
 > curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 > ```
 >

@@ -204,7 +204,7 @@ You can also install Toolbox as a container:

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```

@@ -228,7 +228,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:

 ```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
 ```
 <!-- {x-release-please-end} -->
11  cmd/root.go

@@ -198,6 +198,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"

@@ -214,6 +215,8 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"

@@ -262,6 +265,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
+	_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"

@@ -377,7 +381,9 @@ func NewCommand(opts ...Option) *Command {
 	flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
 	flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
 	flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
+	// TODO: Insecure by default. Might consider updating this for v1.0.0
 	flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
+	flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")

 	// wrap RunE command so that we have access to original Command object
 	cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }

@@ -483,11 +489,11 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
 	}

 	// Check for conflicts and merge embeddingModels
-	for name, model := range file.EmbeddingModels {
+	for name, em := range file.EmbeddingModels {
 		if _, exists := merged.EmbeddingModels[name]; exists {
 			conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
 		} else {
-			merged.EmbeddingModels[name] = model
+			merged.EmbeddingModels[name] = em
 		}
 	}

@@ -943,6 +949,7 @@ func run(cmd *Command) error {

 	cmd.cfg.SourceConfigs = finalToolsFile.Sources
 	cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
+	cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
 	cmd.cfg.ToolConfigs = finalToolsFile.Tools
 	cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
 	cmd.cfg.PromptConfigs = finalToolsFile.Prompts
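The `mergeToolsFiles` change above folds `embeddingModels` into the same merge-and-conflict-check pass used for sources, tools, and toolsets: a name that appears in more than one file is reported as a conflict instead of being silently overwritten. A minimal sketch of the situation it guards against, assuming two files passed via `--tools-files`; the inner fields of the embedding model are illustrative placeholders, not the documented schema:

```yaml
# tools-a.yaml (hypothetical example)
embeddingModels:
  my-embedder:                  # defined here first
    kind: some-embedding-kind   # placeholder value, not the real schema

# tools-b.yaml (hypothetical example)
embeddingModels:
  my-embedder:                  # same name again; merge reports
    kind: some-embedding-kind   # "embedding model 'my-embedder' (file #2)"
```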
@@ -67,6 +67,9 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
 	if c.AllowedOrigins == nil {
 		c.AllowedOrigins = []string{"*"}
 	}
+	if c.AllowedHosts == nil {
+		c.AllowedHosts = []string{"*"}
+	}
 	return c
 }

@@ -220,6 +223,13 @@ func TestServerConfigFlags(t *testing.T) {
 			AllowedOrigins: []string{"http://foo.com", "http://bar.com"},
 		}),
 	},
+	{
+		desc: "allowed hosts",
+		args: []string{"--allowed-hosts", "http://foo.com,http://bar.com"},
+		want: withDefaults(server.ServerConfig{
+			AllowedHosts: []string{"http://foo.com", "http://bar.com"},
+		}),
+	},
 }
 for _, tc := range tcs {
 	t.Run(tc.desc, func(t *testing.T) {

@@ -1352,6 +1362,7 @@ func TestPrebuiltTools(t *testing.T) {
 	cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability")
 	serverless_spark_config, _ := prebuiltconfigs.Get("serverless-spark")
 	cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare")
+	snowflake_config, _ := prebuiltconfigs.Get("snowflake")

 	// Set environment variables
 	t.Setenv("API_KEY", "your_api_key")

@@ -1449,6 +1460,14 @@ func TestPrebuiltTools(t *testing.T) {
 	t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region")
 	t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset")

+	t.Setenv("SNOWFLAKE_ACCOUNT", "your_account")
+	t.Setenv("SNOWFLAKE_USER", "your_username")
+	t.Setenv("SNOWFLAKE_PASSWORD", "your_pass")
+	t.Setenv("SNOWFLAKE_DATABASE", "your_db")
+	t.Setenv("SNOWFLAKE_SCHEMA", "your_schema")
+	t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh")
+	t.Setenv("SNOWFLAKE_ROLE", "your_role")
+
 	ctx, err := testutils.ContextWithNewLogger()
 	if err != nil {
 		t.Fatalf("unexpected error: %s", err)

@@ -1504,7 +1523,7 @@ func TestPrebuiltTools(t *testing.T) {
 	wantToolset: server.ToolsetConfigs{
 		"alloydb_postgres_database_tools": tools.ToolsetConfig{
 			Name: "alloydb_postgres_database_tools",
-			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
+			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
 		},
 	},
 },

@@ -1534,7 +1553,7 @@ func TestPrebuiltTools(t *testing.T) {
 	wantToolset: server.ToolsetConfigs{
 		"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
 			Name: "cloud_sql_postgres_database_tools",
-			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
+			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
 		},
 	},
 },

@@ -1634,7 +1653,7 @@ func TestPrebuiltTools(t *testing.T) {
 	wantToolset: server.ToolsetConfigs{
 		"postgres_database_tools": tools.ToolsetConfig{
 			Name: "postgres_database_tools",
-			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
+			ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
 		},
 	},
 },

@@ -1746,6 +1765,16 @@ func TestPrebuiltTools(t *testing.T) {
 			},
 		},
 	},
+	{
+		name: "Snowflake prebuilt tool",
+		in:   snowflake_config,
+		wantToolset: server.ToolsetConfigs{
+			"snowflake_tools": tools.ToolsetConfig{
+				Name:      "snowflake_tools",
+				ToolNames: []string{"execute_sql", "list_tables"},
+			},
+		},
+	},
 }

 for _, tc := range tcs {
@@ -1 +1 @@
-0.24.0
+0.25.0
@@ -68,6 +68,7 @@ The BigQuery MCP server is configured using environment variables.
 export BIGQUERY_PROJECT="<your-gcp-project-id>"
 export BIGQUERY_LOCATION="<your-dataset-location>" # Optional
 export BIGQUERY_USE_CLIENT_OAUTH="true" # Optional
+export BIGQUERY_SCOPES="<comma-separated-scopes>" # Optional
 ```

 Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
18  docs/en/blogs/_index.md (new file)

@@ -0,0 +1,18 @@
---
title: "Featured Articles"
weight: 3
description: Toolbox Medium Blogs
manualLink: "https://medium.com/@mcp_toolbox"
manualLinkTarget: _blank
---

<html>
<head>
<title>Redirecting to Featured Articles</title>
<link rel="canonical" href="https://medium.com/@mcp_toolbox"/>
<meta http-equiv="refresh" content="0;url=https://medium.com/@mcp_toolbox"/>
</head>
<body>
<p>If you are not automatically redirected, please <a href="https://medium.com/@mcp_toolbox">follow this link to our articles</a>.</p>
</body>
</html>
@@ -234,7 +234,7 @@
   },
   "outputs": [],
   "source": [
-    "version = \"0.24.0\" # x-release-please-version\n",
+    "version = \"0.25.0\" # x-release-please-version\n",
     "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",
@@ -103,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 chmod +x toolbox
 ```

@@ -114,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 chmod +x toolbox
 ```

@@ -125,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 chmod +x toolbox
 ```

@@ -136,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):

 ```cmd
 :: see releases page for other versions
-set VERSION=0.24.0
+set VERSION=0.25.0
 curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
 ```

@@ -146,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):

 ```powershell
 # see releases page for other versions
-$VERSION = "0.24.0"
+$VERSION = "0.25.0"
 curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 ```

@@ -158,7 +158,7 @@ You can also install Toolbox as a container:

 ```sh
 # see releases page for other versions
-export VERSION=0.24.0
+export VERSION=0.25.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```

@@ -177,7 +177,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:

 ```sh
-go install github.com/googleapis/genai-toolbox@v0.24.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
 ```

 {{% /tab %}}
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
@@ -569,11 +569,12 @@
     }
   },
   "node_modules/jws": {
-    "version": "4.0.0",
-    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-    "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+    "version": "4.0.1",
+    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+    "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
+    "license": "MIT",
     "dependencies": {
-      "jwa": "^2.0.0",
+      "jwa": "^2.0.1",
       "safe-buffer": "^5.0.1"
     }
   },
@@ -3376,22 +3376,23 @@
     }
   },
   "node_modules/body-parser": {
-    "version": "1.20.3",
-    "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
-    "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
+    "version": "1.20.4",
+    "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
+    "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
     "license": "MIT",
     "dependencies": {
-      "bytes": "3.1.2",
+      "bytes": "~3.1.2",
       "content-type": "~1.0.5",
       "debug": "2.6.9",
       "depd": "2.0.0",
-      "destroy": "1.2.0",
-      "http-errors": "2.0.0",
-      "iconv-lite": "0.4.24",
-      "on-finished": "2.4.1",
-      "qs": "6.13.0",
-      "raw-body": "2.5.2",
+      "destroy": "~1.2.0",
+      "http-errors": "~2.0.1",
+      "iconv-lite": "~0.4.24",
+      "on-finished": "~2.4.1",
+      "qs": "~6.14.0",
+      "raw-body": "~2.5.3",
       "type-is": "~1.6.18",
-      "unpipe": "1.0.0"
+      "unpipe": "~1.0.0"
     },
     "engines": {
       "node": ">= 0.8",
@@ -3406,11 +3407,40 @@
       "ms": "2.0.0"
     }
   },
+  "node_modules/body-parser/node_modules/http-errors": {
+    "version": "2.0.1",
+    "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
+    "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
+    "license": "MIT",
+    "dependencies": {
+      "depd": "~2.0.0",
+      "inherits": "~2.0.4",
+      "setprototypeof": "~1.2.0",
+      "statuses": "~2.0.2",
+      "toidentifier": "~1.0.1"
+    },
+    "engines": {
+      "node": ">= 0.8"
+    },
+    "funding": {
+      "type": "opencollective",
+      "url": "https://opencollective.com/express"
+    }
+  },
   "node_modules/body-parser/node_modules/ms": {
     "version": "2.0.0",
     "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
     "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
   },
+  "node_modules/body-parser/node_modules/statuses": {
+    "version": "2.0.2",
+    "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
+    "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
+    "license": "MIT",
+    "engines": {
+      "node": ">= 0.8"
+    }
+  },
   "node_modules/buffer-equal-constant-time": {
     "version": "1.0.1",
     "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@@ -3434,6 +3464,7 @@
     "version": "3.1.2",
     "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
     "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
+    "license": "MIT",
     "engines": {
       "node": ">= 0.8"
     }
@@ -3830,38 +3861,39 @@
     }
   },
   "node_modules/express": {
-    "version": "4.21.2",
-    "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
-    "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
+    "version": "4.22.1",
+    "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
+    "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
     "license": "MIT",
     "dependencies": {
       "accepts": "~1.3.8",
       "array-flatten": "1.1.1",
-      "body-parser": "1.20.3",
-      "content-disposition": "0.5.4",
+      "body-parser": "~1.20.3",
+      "content-disposition": "~0.5.4",
       "content-type": "~1.0.4",
-      "cookie": "0.7.1",
-      "cookie-signature": "1.0.6",
+      "cookie": "~0.7.1",
+      "cookie-signature": "~1.0.6",
       "debug": "2.6.9",
       "depd": "2.0.0",
       "encodeurl": "~2.0.0",
       "escape-html": "~1.0.3",
       "etag": "~1.8.1",
-      "finalhandler": "1.3.1",
-      "fresh": "0.5.2",
-      "http-errors": "2.0.0",
+      "finalhandler": "~1.3.1",
+      "fresh": "~0.5.2",
+      "http-errors": "~2.0.0",
       "merge-descriptors": "1.0.3",
       "methods": "~1.1.2",
-      "on-finished": "2.4.1",
+      "on-finished": "~2.4.1",
       "parseurl": "~1.3.3",
-      "path-to-regexp": "0.1.12",
+      "path-to-regexp": "~0.1.12",
       "proxy-addr": "~2.0.7",
-      "qs": "6.13.0",
+      "qs": "~6.14.0",
       "range-parser": "~1.2.1",
       "safe-buffer": "5.2.1",
-      "send": "0.19.0",
-      "serve-static": "1.16.2",
+      "send": "~0.19.0",
+      "serve-static": "~1.16.2",
       "setprototypeof": "1.2.0",
-      "statuses": "2.0.1",
+      "statuses": "~2.0.1",
       "type-is": "~1.6.18",
       "utils-merge": "1.0.1",
       "vary": "~1.1.2"
@@ -4904,6 +4936,7 @@
     "version": "0.4.24",
     "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
     "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+    "license": "MIT",
     "dependencies": {
       "safer-buffer": ">= 2.1.2 < 3"
     },
@@ -5661,11 +5694,12 @@
     }
   },
   "node_modules/qs": {
-    "version": "6.13.0",
-    "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
-    "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
+    "version": "6.14.1",
+    "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
+    "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
     "license": "BSD-3-Clause",
     "dependencies": {
-      "side-channel": "^1.0.6"
+      "side-channel": "^1.1.0"
     },
     "engines": {
       "node": ">=0.6"
@@ -5683,19 +5717,49 @@
     }
   },
   "node_modules/raw-body": {
-    "version": "2.5.2",
-    "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
-    "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
+    "version": "2.5.3",
+    "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
+    "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
     "license": "MIT",
     "dependencies": {
-      "bytes": "3.1.2",
-      "http-errors": "2.0.0",
-      "iconv-lite": "0.4.24",
-      "unpipe": "1.0.0"
+      "bytes": "~3.1.2",
+      "http-errors": "~2.0.1",
+      "iconv-lite": "~0.4.24",
+      "unpipe": "~1.0.0"
     },
     "engines": {
       "node": ">= 0.8"
     }
   },
+  "node_modules/raw-body/node_modules/http-errors": {
+    "version": "2.0.1",
+    "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
+    "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
+    "license": "MIT",
+    "dependencies": {
+      "depd": "~2.0.0",
+      "inherits": "~2.0.4",
+      "setprototypeof": "~1.2.0",
+      "statuses": "~2.0.2",
+      "toidentifier": "~1.0.1"
+    },
+    "engines": {
+      "node": ">= 0.8"
+    },
+    "funding": {
+      "type": "opencollective",
+      "url": "https://opencollective.com/express"
+    }
+  },
+  "node_modules/raw-body/node_modules/statuses": {
+    "version": "2.0.2",
+    "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
+    "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
+    "license": "MIT",
+    "engines": {
+      "node": ">= 0.8"
+    }
+  },
   "node_modules/readable-stream": {
     "version": "3.6.2",
     "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
@@ -5813,7 +5877,8 @@
   "node_modules/safer-buffer": {
     "version": "2.1.2",
     "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
-    "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+    "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+    "license": "MIT"
   },
   "node_modules/semver": {
     "version": "7.7.2",
@@ -882,11 +882,12 @@
     }
   },
   "node_modules/jws": {
-    "version": "4.0.0",
-    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
-    "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
+    "version": "4.0.1",
+    "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+    "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
+    "license": "MIT",
     "dependencies": {
-      "jwa": "^2.0.0",
+      "jwa": "^2.0.1",
       "safe-buffer": "^5.0.1"
     }
   },
@@ -1,3 +1,3 @@
-google-genai==1.56.0
+google-genai==1.57.0
 toolbox-core==0.5.4
 pytest==9.0.2
@@ -1,5 +1,5 @@
-langchain==1.2.0
-langchain-google-vertexai==3.2.0
+langchain==1.2.2
+langchain-google-vertexai==3.2.1
 langgraph==1.0.5
 toolbox-langchain==0.5.4
 pytest==9.0.2
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a

 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -45,19 +45,19 @@ instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
 {{< /tab >}}

 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
 {{< /tab >}}

 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -68,7 +68,12 @@ networks:
 ```

 {{< notice tip >}}
-To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
+To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
 list of hosts for validation. E.g. `command: [ "toolbox",
 "--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
 "--allowed-hosts", "localhost:5000"]`

+To implement CORs, use the `--allowed-origins` flag to specify a
+list of origins permitted to access the server. E.g. `command: [ "toolbox",
+"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
+"--allowed-origins", "https://foo.bar"]`
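For the Docker Compose setup this notice refers to, both flags sit on the container command. A minimal sketch under stated assumptions: the image tag follows the container pull instructions elsewhere in these docs, and the config volume path and port mapping are placeholders to adapt to your own compose file:

```yaml
services:
  toolbox:
    # Image tag is an assumption; pin the release you actually deploy.
    image: us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:0.25.0
    command:
      - "toolbox"
      - "--tools-file"
      - "/config/tools.yaml"
      - "--address"
      - "0.0.0.0"
      - "--allowed-hosts"          # host-header validation (DNS rebinding)
      - "localhost:5000"
      - "--allowed-origins"        # CORS origin allow-list
      - "https://foo.bar"
    volumes:
      - ./config:/config           # placeholder mount for tools.yaml
    ports:
      - "5000:5000"                # 5000 is the default Toolbox port
```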
@@ -188,9 +188,13 @@ description: >
       path: tools.yaml
 ```

-{{< notice tip >}}
+{{< notice tip >}}
 To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
 list of origins permitted to access the server. E.g. `args: ["--address",
 "0.0.0.0", "--allowed-hosts", "foo.bar:5000"]`

+To implement CORs, use the `--allowed-origins` flag to specify a
+list of origins permitted to access the server. E.g. `args: ["--address",
+"0.0.0.0", "--allowed-origins", "https://foo.bar"]`
 {{< /notice >}}
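In the Kubernetes deployment described here, the same two flags go on the container `args`. A hedged sketch of just the container fragment; the container name, image tag, and tools-file path are illustrative and should match the rest of your manifest:

```yaml
# Fragment of a Deployment pod spec; only the args are the point here.
containers:
  - name: toolbox                  # illustrative name
    image: us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:0.25.0
    args:
      - "--tools-file"
      - "/app/tools.yaml"          # assumed mount path for the tools config
      - "--address"
      - "0.0.0.0"
      - "--allowed-hosts"          # host allow-list against DNS rebinding
      - "foo.bar:5000"
      - "--allowed-origins"        # CORS origin allow-list
      - "https://foo.bar"
```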
@@ -142,14 +142,18 @@ deployment will time out.

 ### Update deployed server to be secure

-To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
-list of origins permitted to access the server. In order to do that, you will
+To prevent DNS rebinding attack, use the `--allowed-hosts` flag to specify a
+list of hosts. In order to do that, you will
 have to re-deploy the cloud run service with the new flag.

+To implement CORs checks, use the `--allowed-origins` flag to specify a list of
+origins permitted to access the server.

 1. Set an environment variable to the cloud run url:

    ```bash
    export URL=<cloud run url>
+   export HOST=<cloud run host>
    ```

 2. Redeploy Toolbox:
@@ -160,7 +164,7 @@ have to re-deploy the cloud run service with the new flag.
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
-       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL"
+       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST"
        # --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
    ```
@@ -172,7 +176,7 @@ have to re-deploy the cloud run service with the new flag.
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
-       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL" \
+       --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST" \
        # TODO(dev): update the following to match your VPC if necessary
        --network default \
        --subnet default
@@ -8,25 +8,26 @@ description: >

 ## Reference

 | Flag (Short) | Flag (Long) | Description | Default |
 |---|---|---|---|
 | `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
 | | `--disable-reload` | Disables dynamic reloading of tools file. | |
 | `-h` | `--help` | help for toolbox | |
 | | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
 | | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
 | `-p` | `--port` | Port the server will listen on. | `5000` |
 | | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
 | | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
 | | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
 | | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
 | | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
 | | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
 | | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
 | | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
 | | `--ui` | Launches the Toolbox UI web server. | |
-| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
+| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
+| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
 | `-v` | `--version` | version for toolbox | |

 ## Examples
@@ -105,6 +105,8 @@ See [Usage Examples](../reference/cli.md#examples).
 * `BIGQUERY_LOCATION`: (Optional) The dataset location.
 * `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
   OAuth access token for authentication. Defaults to `false`.
+* `BIGQUERY_SCOPES`: (Optional) A comma-separated list of OAuth scopes to
+  use for authentication.
 * **Permissions:**
   * **BigQuery User** (`roles/bigquery.user`) to execute queries and view
     metadata.
@@ -94,7 +94,10 @@ cluster][alloydb-free-trial].
   instance.

 - [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
-  Lists all the user-created roles in PostgreSQL database..
+  Lists all the user-created roles in PostgreSQL database.

+- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
+  Lists all the stored procedure in PostgreSQL database.

 ### Pre-built Configurations
@@ -94,6 +94,13 @@ intend to run. Common roles include
 permissions to run jobs and read data) or `roles/bigbigquery.dataViewer`.
 Follow this [guide][set-adc] to set up your ADC.

+If you are running on Google Compute Engine (GCE) or Google Kubernetes Engine
+(GKE), you might need to explicitly set the access scopes for the service
+account. While you can configure scopes when creating the VM or node pool, you
+can also specify them in the source configuration using the `scopes` field.
+Common scopes include `https://www.googleapis.com/auth/bigquery` or
+`https://www.googleapis.com/auth/cloud-platform`.

 ### Authentication via User's OAuth Access Token

 If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
@@ -124,6 +131,9 @@ sources:
       # - "my_dataset_1"
       # - "other_project.my_dataset_2"
       # impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
+      # scopes: # Optional: List of OAuth scopes to request.
+      #   - "https://www.googleapis.com/auth/bigquery"
+      #   - "https://www.googleapis.com/auth/drive.readonly"
 ```

 Initialize a BigQuery source that uses the client's access token:
@@ -140,6 +150,9 @@ sources:
       # - "my_dataset_1"
      # - "other_project.my_dataset_2"
       # impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
+      # scopes: # Optional: List of OAuth scopes to request.
+      #   - "https://www.googleapis.com/auth/bigquery"
+      #   - "https://www.googleapis.com/auth/drive.readonly"
 ```

 ## Reference
@@ -152,4 +165,5 @@ sources:
 | writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
 | allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
 | useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
+| scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
 | impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |
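Putting the new `scopes` field together with the rest of a BigQuery source definition, a source that pins explicit access scopes might look like the sketch below. The project ID and scope list are placeholders, and the surrounding `kind`/`project` fields are assumed to follow the full example earlier on this page:

```yaml
sources:
  my-bigquery-source:
    kind: bigquery
    project: my-project-id          # placeholder project ID
    scopes:                         # optional; omit to fall back to the default scopes
      - "https://www.googleapis.com/auth/bigquery"
      - "https://www.googleapis.com/auth/drive.readonly"
```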
@@ -91,7 +91,10 @@ to a database by following these instructions][csql-pg-quickstart].
   instance.

 - [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
-  Lists all the user-created roles in PostgreSQL database..
+  Lists all the user-created roles in PostgreSQL database.

+- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
+  Lists all the stored procedure in PostgreSQL database.

 ### Pre-built Configurations
@@ -85,7 +85,10 @@ reputation for reliability, feature robustness, and performance.
|
||||
server.
|
||||
|
||||
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
|
||||
Lists all the user-created roles in PostgreSQL database..
|
||||
Lists all the user-created roles in PostgreSQL database.
|
||||
|
||||
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
|
||||
Lists all the stored procedure in PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
|
||||
docs/en/resources/sources/snowflake.md (new file, 63 lines)
@@ -0,0 +1,63 @@
---
title: "Snowflake"
type: docs
weight: 1
description: >
  Snowflake is a cloud-based data platform.

---

## About

[Snowflake][sf-docs] is a cloud data platform that provides a data warehouse-as-a-service designed for the cloud.

[sf-docs]: https://docs.snowflake.com/

## Available Tools

- [`snowflake-sql`](../tools/snowflake/snowflake-sql.md)
  Execute SQL queries as prepared statements in Snowflake.

- [`snowflake-execute-sql`](../tools/snowflake/snowflake-execute-sql.md)
  Execute ad-hoc SQL statements in Snowflake.

## Requirements

### Database User

This source only uses standard authentication. You will need to create a
Snowflake user to log in to the database with.

## Example

```yaml
sources:
  my-sf-source:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE}
    role: ${SNOWFLAKE_ROLE}
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "snowflake". |
| account | string | true | Your Snowflake account identifier. |
| user | string | true | Name of the Snowflake user to connect as (e.g. "my-sf-user"). |
| password | string | true | Password of the Snowflake user (e.g. "my-password"). |
| database | string | true | Name of the Snowflake database to connect to (e.g. "my_db"). |
| schema | string | true | Name of the schema to use (e.g. "my_schema"). |
| warehouse | string | false | The virtual warehouse to use. Defaults to "COMPUTE_WH". |
| role | string | false | The security role to use. Defaults to "ACCOUNTADMIN". |
| timeout | integer | false | The connection timeout in seconds. Defaults to 60. |
@@ -50,16 +50,19 @@ instead of hardcoding your secrets into the configuration file.

## Reference

| **field** | **type** | **required** | **description** |
|-----------------|:--------:|:------------:|------------------------------------------------------------------------------|
| kind | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
| password | string | false | Password for basic authentication |
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
| schema | string | true | Default schema to use for queries (e.g. "default") |
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
| accessToken | string | false | JWT access token for authentication |
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
| **field** | **type** | **required** | **description** |
| ---------------------- | :------: | :----------: | ---------------------------------------------------------------------------- |
| kind | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
| password | string | false | Password for basic authentication |
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
| schema | string | true | Default schema to use for queries (e.g. "default") |
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
| accessToken | string | false | JWT access token for authentication |
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
| disableSslVerification | boolean | false | Skip SSL/TLS certificate verification (default: false) |
| sslCertPath | string | false | Path to a custom SSL/TLS certificate file |
| sslCert | string | false | Custom SSL/TLS certificate content |
@@ -18,9 +18,11 @@ It's compatible with the following sources:

- [looker](../../sources/looker.md)

`looker-make-dashboard` takes one parameter:
`looker-make-dashboard` takes three parameters:

1. the `title`
2. the `description`
3. an optional `folder` id. If not provided, the user's default folder will be used.

## Example

@@ -18,7 +18,7 @@ It's compatible with the following sources:

- [looker](../../sources/looker.md)

`looker-make-look` takes eleven parameters:
`looker-make-look` takes twelve parameters:

1. the `model`
2. the `explore`
@@ -31,6 +31,7 @@ It's compatible with the following sources:
9. an optional `vis_config`
10. the `title`
11. an optional `description`
12. an optional `folder` id. If not provided, the user's default folder will be used.

## Example
@@ -0,0 +1,141 @@
---
title: "postgres-list-stored-procedure"
type: docs
weight: 1
description: >
  The "postgres-list-stored-procedure" tool retrieves metadata for stored procedures in PostgreSQL, including procedure definitions, owners, languages, and descriptions.
aliases:
- /resources/tools/postgres-list-stored-procedure
---

## About

The `postgres-list-stored-procedure` tool queries PostgreSQL system catalogs (`pg_proc`, `pg_namespace`, `pg_roles`, and `pg_language`) to retrieve comprehensive metadata about stored procedures in the database. It filters for procedures (kind = 'p') and provides the full procedure definition along with ownership and language information.

Compatible sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

The tool returns a JSON array where each element represents a stored procedure with its schema, name, owner, language, complete definition, and optional description. Results are sorted by schema name and procedure name, with a default limit of 20 procedures.

## Parameters

| parameter | type | required | default | description |
|--------------|---------|----------|---------|-------------|
| role_name | string | false | null | Optional: The owner name to filter stored procedures by (supports partial matching) |
| schema_name | string | false | null | Optional: The schema name to filter stored procedures by (supports partial matching) |
| limit | integer | false | 20 | Optional: The maximum number of stored procedures to return |

## Example

```yaml
tools:
  list_stored_procedure:
    kind: postgres-list-stored-procedure
    source: postgres-source
    description: "Retrieves stored procedure metadata including definitions and owners."
```

### Example Requests

**List all stored procedures (default limit 20):**

```json
{}
```

**Filter by specific owner (role):**

```json
{
  "role_name": "app_user"
}
```

**Filter by schema:**

```json
{
  "schema_name": "public"
}
```

**Filter by owner and schema with custom limit:**

```json
{
  "role_name": "postgres",
  "schema_name": "public",
  "limit": 50
}
```

**Filter by partial schema name:**

```json
{
  "schema_name": "audit"
}
```

### Example Response

```json
[
  {
    "schema_name": "public",
    "name": "process_payment",
    "owner": "postgres",
    "language": "plpgsql",
    "definition": "CREATE OR REPLACE PROCEDURE public.process_payment(p_order_id integer, p_amount numeric)\n LANGUAGE plpgsql\nAS $procedure$\nBEGIN\n  UPDATE orders SET status = 'paid', amount = p_amount WHERE id = p_order_id;\n  INSERT INTO payment_log (order_id, amount, timestamp) VALUES (p_order_id, p_amount, now());\n  COMMIT;\nEND\n$procedure$",
    "description": "Processes payment for an order and logs the transaction"
  },
  {
    "schema_name": "public",
    "name": "cleanup_old_records",
    "owner": "postgres",
    "language": "plpgsql",
    "definition": "CREATE OR REPLACE PROCEDURE public.cleanup_old_records(p_days_old integer)\n LANGUAGE plpgsql\nAS $procedure$\nDECLARE\n  v_deleted integer;\nBEGIN\n  DELETE FROM audit_logs WHERE created_at < now() - (p_days_old || ' days')::interval;\n  GET DIAGNOSTICS v_deleted = ROW_COUNT;\n  RAISE NOTICE 'Deleted % records', v_deleted;\nEND\n$procedure$",
    "description": "Removes audit log records older than specified days"
  },
  {
    "schema_name": "audit",
    "name": "audit_table_changes",
    "owner": "app_user",
    "language": "plpgsql",
    "definition": "CREATE OR REPLACE PROCEDURE audit.audit_table_changes()\n LANGUAGE plpgsql\nAS $procedure$\nBEGIN\n  INSERT INTO audit.change_log (table_name, operation, changed_at) VALUES (TG_TABLE_NAME, TG_OP, now());\nEND\n$procedure$",
    "description": null
  }
]
```

## Output Fields Reference

| field | type | description |
|-------------|---------|-------------|
| schema_name | string | Name of the schema containing the stored procedure. |
| name | string | Name of the stored procedure. |
| owner | string | PostgreSQL role/user who owns the stored procedure. |
| language | string | Programming language in which the procedure is written (e.g., plpgsql, sql, c). |
| definition | string | Complete SQL definition of the stored procedure, including the CREATE PROCEDURE statement. |
| description | string | Optional description or comment for the procedure (may be null if no comment is set). |

## Use Cases

- **Code review and auditing**: Export procedure definitions for version control or compliance audits.
- **Documentation generation**: Automatically extract procedure metadata and descriptions for documentation.
- **Permission auditing**: Identify procedures owned by specific users or in specific schemas.
- **Migration planning**: Retrieve all procedure definitions when planning database migrations.
- **Dependency analysis**: Review procedure definitions to understand dependencies and call chains.
- **Security assessment**: Audit which roles own and can modify stored procedures.

## Performance Considerations

- The tool filters at the database level using LIKE pattern matching, so partial matches are supported.
- Procedure definitions can be large; consider using the `limit` parameter for large databases with many procedures.
- Results are ordered by schema name and procedure name for consistent output.
- The default limit of 20 procedures is suitable for most use cases; increase as needed.

## Notes

- Only stored **procedures** are returned; functions and other callable objects are excluded via the `prokind = 'p'` filter.
- Filtering uses `LIKE` pattern matching, so filter values support partial matches (e.g., `role_name: "app"` will match "app_user", "app_admin", etc.).
- The `definition` field contains the complete, runnable CREATE PROCEDURE statement.
- The `description` field is populated from comments set via PostgreSQL's COMMENT command and may be null.
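
For illustration only, here is a minimal client-side sketch that calls a tool configured as above, using the Python `toolbox_core` client from the samples later in this change set. The server URL and tool name are assumptions taken from that example configuration, not part of this tool's definition.

```python
import asyncio

from toolbox_core import ToolboxClient


async def main():
    # Assumes a local Toolbox server started with the configuration above.
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("list_stored_procedure")
        # Partial matches are supported for both filters.
        result = await tool(role_name="app_user", schema_name="public", limit=10)
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
```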
docs/en/resources/tools/snowflake/_index.md (new file, 7 lines)
@@ -0,0 +1,7 @@
---
title: "Snowflake"
type: docs
weight: 1
description: >
  Tools that work with Snowflake Sources.
---

docs/en/resources/tools/snowflake/snowflake-execute-sql.md (new file, 40 lines)
@@ -0,0 +1,40 @@
---
title: "snowflake-execute-sql"
type: docs
weight: 1
description: >
  A "snowflake-execute-sql" tool executes a SQL statement against a Snowflake
  database.
---

## About

A `snowflake-execute-sql` tool executes a SQL statement against a Snowflake
database. It's compatible with any of the following sources:

- [snowflake](../../sources/snowflake.md)

`snowflake-execute-sql` takes one input parameter `sql` and runs the SQL
statement against the `source`.

> **Note:** This tool is intended for developer assistant workflows with
> human-in-the-loop and shouldn't be used for production agents.

## Example

```yaml
tools:
  execute_sql_tool:
    kind: snowflake-execute-sql
    source: my-snowflake-instance
    description: Use this tool to execute a SQL statement.
```

## Reference

| **field** | **type** | **required** | **description** |
|--------------|:-------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "snowflake-execute-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
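
For illustration, a minimal sketch invoking the `execute_sql_tool` defined above from the Python `toolbox_core` client; the local server URL is an assumption and the query is just an example:

```python
import asyncio

from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        execute_sql = await toolbox.load_tool("execute_sql_tool")
        # The tool takes a single `sql` parameter; keep statements ad hoc and
        # human-reviewed, per the note above.
        print(await execute_sql(sql="SELECT CURRENT_VERSION()"))


if __name__ == "__main__":
    asyncio.run(main())
```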
docs/en/resources/tools/snowflake/snowflake-sql.md (new file, 103 lines)
@@ -0,0 +1,103 @@
---
title: "snowflake-sql"
type: docs
weight: 1
description: >
  A "snowflake-sql" tool executes a pre-defined SQL statement against a
  Snowflake database.
---

## About

A `snowflake-sql` tool executes a pre-defined SQL statement against a Snowflake
database. It's compatible with any of the following sources:

- [snowflake](../../sources/snowflake.md)

The specified SQL statement is executed as a prepared statement, and specified
parameters will be inserted according to their position: e.g. `:1` will be the
first parameter specified, `:2` will be the second parameter, and so on.

> **Note:** This tool uses parameterized queries to prevent SQL injections.
> Query parameters can be used as substitutes for arbitrary expressions.
> Parameters cannot be used as substitutes for identifiers, column names, table
> names, or other parts of the query.

## Example

```yaml
tools:
  search_flights_by_number:
    kind: snowflake-sql
    source: my-snowflake-instance
    statement: |
      SELECT * FROM flights
      WHERE airline = :1
      AND flight_number = :2
      LIMIT 10
    description: |
      Use this tool to get information for a specific flight.
      Takes an airline code and flight number and returns info on the flight.
      Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
      An airline code is a two-character airline designator followed by a flight number, which is a 1 to 4 digit number.
      For example, if given CY 0123, the airline is "CY", and flight_number is "123".
      Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
      If the tool returns more than one option, choose the date closest to today.
      Example:
      {{
        "airline": "CY",
        "flight_number": "888",
      }}
      Example:
      {{
        "airline": "DL",
        "flight_number": "1234",
      }}
    parameters:
      - name: airline
        type: string
        description: Airline unique 2 letter identifier
      - name: flight_number
        type: string
        description: 1 to 4 digit number
```
### Example with Template Parameters

> **Note:** This tool allows direct modifications to the SQL statement,
> including identifiers, column names, and table names. **This makes it more
> vulnerable to SQL injections**. Using basic parameters only (see above) is
> recommended for performance and safety reasons. For more details, please check
> [templateParameters](..#template-parameters).

```yaml
tools:
  list_table:
    kind: snowflake-sql
    source: my-snowflake-instance
    statement: |
      SELECT * FROM {{.tableName}};
    description: |
      Use this tool to list all information from a specific table.
      Example:
      {{
        "tableName": "flights",
      }}
    templateParameters:
      - name: tableName
        type: string
        description: Table to select from
```

## Reference

| **field** | **type** | **required** | **description** |
|--------------------|:--------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "snowflake-sql". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | SQL statement to execute on. |
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
| templateParameters | [templateParameters](..#template-parameters) | false | List of [templateParameters](..#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
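
For illustration, a minimal sketch calling the `search_flights_by_number` tool defined above via the Python `toolbox_core` client; the flight values are hypothetical and the local server URL is an assumption:

```python
import asyncio

from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        search_flights = await toolbox.load_tool("search_flights_by_number")
        # The declared parameters map to the positional placeholders in order:
        # airline -> :1, flight_number -> :2.
        print(await search_flights(airline="CY", flight_number="888"))


if __name__ == "__main__":
    asyncio.run(main())
```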
@@ -16,11 +16,7 @@ database. It's compatible with any of the following sources:

- [trino](../../sources/trino.md)

The specified SQL statement is executed as a [prepared statement][trino-prepare],
and specified parameters will be inserted according to their position: e.g. `$1`
will be the first parameter specified, `$2` will be the second parameter, and so
on. If template parameters are included, they will be resolved before execution
of the prepared statement.
The specified SQL statement is executed as a [prepared statement][trino-prepare], and expects parameters in the SQL query to be in the form of placeholders `?`.

[trino-prepare]: https://trino.io/docs/current/sql/prepare.html

@@ -38,8 +34,8 @@ tools:
    source: my-trino-instance
    statement: |
      SELECT * FROM hive.sales.orders
      WHERE region = $1
      AND order_date >= DATE($2)
      WHERE region = ?
      AND order_date >= DATE(?)
      LIMIT 10
    description: |
      Use this tool to get information for orders in a specific region.
@@ -771,7 +771,7 @@
},
"outputs": [],
"source": [
"version = \"0.24.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
export VERSION="0.24.0"
export VERSION="0.25.0"
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -220,7 +220,7 @@
},
"outputs": [],
"source": [
"version = \"0.24.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
docs/en/samples/snowflake/_index.md (new file, 161 lines)
@@ -0,0 +1,161 @@
---
title: "Snowflake"
type: docs
weight: 2
description: >
  How to get started running Toolbox with MCP Inspector and Snowflake as the source.
---

## Overview

[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
that standardizes how applications provide context to LLMs. Check out this page
on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).

## Before you begin

This guide assumes you have already done the following:

1. [Create a Snowflake account](https://signup.snowflake.com/).
1. Connect to the instance using [SnowSQL](https://docs.snowflake.com/en/user-guide/snowsql), or any other Snowflake client.

## Step 1: Set up your environment

Copy the environment template and update it with your Snowflake credentials:

```bash
cp examples/snowflake-env.sh my-snowflake-env.sh
```

Edit `my-snowflake-env.sh` with your actual Snowflake connection details:

```bash
export SNOWFLAKE_ACCOUNT="your-account-identifier"
export SNOWFLAKE_USER="your-username"
export SNOWFLAKE_PASSWORD="your-password"
export SNOWFLAKE_DATABASE="your-database"
export SNOWFLAKE_SCHEMA="your-schema"
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
export SNOWFLAKE_ROLE="ACCOUNTADMIN"
```

## Step 2: Install Toolbox

In this section, we will download and install the Toolbox binary.

1. Download the latest version of Toolbox as a binary:

    {{< notice tip >}}
    Select the
    [correct binary](https://github.com/googleapis/genai-toolbox/releases)
    corresponding to your OS and CPU architecture.
    {{< /notice >}}
    <!-- {x-release-please-start-version} -->

    ```bash
    export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
    export VERSION="0.10.0"
    curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
    ```

    <!-- {x-release-please-end} -->

1. Make the binary executable:

    ```bash
    chmod +x toolbox
    ```

## Step 3: Configure the tools

You have two options:

#### Option A: Use the prebuilt configuration

```bash
./toolbox --prebuilt snowflake
```

#### Option B: Use the custom configuration

Create a `tools.yaml` file and add the following content. You must replace the placeholders with your actual Snowflake configuration.

```yaml
sources:
  snowflake-source:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE}
    role: ${SNOWFLAKE_ROLE}

tools:
  execute_sql:
    kind: snowflake-execute-sql
    source: snowflake-source
    description: Use this tool to execute SQL.

  list_tables:
    kind: snowflake-sql
    source: snowflake-source
    description: "Lists detailed schema information for user-created tables."
    statement: |
      SELECT table_name, table_type
      FROM information_schema.tables
      WHERE table_schema = current_schema()
      ORDER BY table_name;
```

For more info on tools, check out the
[Tools](../../resources/tools/) section.

## Step 4: Run the Toolbox server

Run the Toolbox server, pointing to the `tools.yaml` file created earlier:

```bash
./toolbox --tools-file "tools.yaml"
```

## Step 5: Connect to MCP Inspector

1. Run the MCP Inspector:

    ```bash
    npx @modelcontextprotocol/inspector
    ```

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):

    ```bash
    Starting MCP inspector...
    ⚙️ Proxy server listening on localhost:6277
    🔑 Session token: <YOUR_SESSION_TOKEN>
    Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

    🚀 MCP Inspector is up and running at:
    http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
    ```

1. Open the above link in your browser.

1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.

1. Click Connect.

1. Select `List Tools`; you will see a list of tools configured in `tools.yaml`.

1. Test out your tools here!

## What's next

- Learn more about [MCP Inspector](../../how-to/connect_via_mcp.md).
- Learn more about [Toolbox Resources](../../resources/).
- Learn more about [Toolbox How-to guides](../../how-to/).
docs/en/samples/snowflake/runme.py (new file, 18 lines)
@@ -0,0 +1,18 @@
import asyncio
from toolbox_core import ToolboxClient


async def main():
    # Replace with the actual URL where your Toolbox service is running
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        tool = await toolbox.load_tool("execute_sql")
        result = await tool("SELECT 1")
        print(result)

        tool = await toolbox.load_tool("list_tables")
        result = await tool(table_names="DIM_DATE")
        print(result)


if __name__ == "__main__":
    asyncio.run(main())
docs/en/samples/snowflake/snowflake-config.yaml (new file, 68 lines)
@@ -0,0 +1,68 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  my-snowflake-db:
    kind: snowflake
    account: ${SNOWFLAKE_ACCOUNT}
    user: ${SNOWFLAKE_USER}
    password: ${SNOWFLAKE_PASSWORD}
    database: ${SNOWFLAKE_DATABASE}
    schema: ${SNOWFLAKE_SCHEMA}
    warehouse: ${SNOWFLAKE_WAREHOUSE} # Optional, defaults to COMPUTE_WH if not set
    role: ${SNOWFLAKE_ROLE} # Optional, defaults to ACCOUNTADMIN if not set

tools:
  execute_sql:
    kind: snowflake-execute-sql
    source: my-snowflake-db
    description: Execute arbitrary SQL statements on Snowflake

  get_customer_orders:
    kind: snowflake-sql
    source: my-snowflake-db
    description: Get orders for a specific customer
    statement: |
      SELECT o.order_id, o.order_date, o.total_amount, o.status
      FROM orders o
      WHERE o.customer_id = $1
      ORDER BY o.order_date DESC
    parameters:
      - name: customer_id
        type: string
        description: The customer ID to look up orders for

  daily_sales_report:
    kind: snowflake-sql
    source: my-snowflake-db
    description: Generate daily sales report for a specific date
    statement: |
      SELECT
        DATE(order_date) as sales_date,
        COUNT(*) as total_orders,
        SUM(total_amount) as total_revenue,
        AVG(total_amount) as avg_order_value
      FROM orders
      WHERE DATE(order_date) = $1
      GROUP BY DATE(order_date)
    parameters:
      - name: report_date
        type: string
        description: The date to generate report for (YYYY-MM-DD format)

toolsets:
  snowflake-analytics:
    - execute_sql
    - get_customer_orders
    - daily_sales_report
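
For illustration, a minimal sketch exercising the tools defined in this sample configuration with the Python `toolbox_core` client; the customer ID and report date are hypothetical values, and a local server on the default port is assumed:

```python
import asyncio

from toolbox_core import ToolboxClient


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        get_orders = await toolbox.load_tool("get_customer_orders")
        print(await get_orders(customer_id="C-1001"))  # hypothetical customer ID

        daily_report = await toolbox.load_tool("daily_sales_report")
        print(await daily_report(report_date="2025-01-01"))  # hypothetical date


if __name__ == "__main__":
    asyncio.run(main())
```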
docs/en/samples/snowflake/snowflake-env.sh (new file, 28 lines)
@@ -0,0 +1,28 @@
#!/bin/bash

# Snowflake Connection Configuration
# Copy this file to snowflake-env.sh and update with your actual values
# Then source it before running the toolbox: source snowflake-env.sh

# Required environment variables
export SNOWFLAKE_ACCOUNT="your-account-identifier" # e.g., "xy12345.snowflakecomputing.com"
export SNOWFLAKE_USER="your-username"              # Your Snowflake username
export SNOWFLAKE_PASSWORD="your-password"          # Your Snowflake password
export SNOWFLAKE_DATABASE="your-database"          # Database name
export SNOWFLAKE_SCHEMA="your-schema"              # Schema name (usually "PUBLIC")

# Optional environment variables (will use defaults if not set)
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"            # Warehouse name (default: COMPUTE_WH)
export SNOWFLAKE_ROLE="ACCOUNTADMIN"               # Role name (default: ACCOUNTADMIN)

echo "Snowflake environment variables have been set!"
echo "Account: $SNOWFLAKE_ACCOUNT"
echo "User: $SNOWFLAKE_USER"
echo "Database: $SNOWFLAKE_DATABASE"
echo "Schema: $SNOWFLAKE_SCHEMA"
echo "Warehouse: $SNOWFLAKE_WAREHOUSE"
echo "Role: $SNOWFLAKE_ROLE"
echo ""
echo "You can now run the toolbox with:"
echo "  ./toolbox --prebuilt snowflake # Use prebuilt configuration"
echo "  ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml # Use custom configuration"
docs/en/samples/snowflake/test-snowflake.sh (new file, 56 lines)
@@ -0,0 +1,56 @@
#!/bin/bash

# Test script to demonstrate Snowflake configuration with environment variables
# This script shows how to set up and test the Snowflake toolbox configuration

echo "=== Testing Snowflake Configuration ==="
echo ""

# Set up test environment variables (replace with your actual values)
echo "Setting up test environment variables..."
export SNOWFLAKE_ACCOUNT="test-account"
export SNOWFLAKE_USER="test-user"
export SNOWFLAKE_PASSWORD="test-password"
export SNOWFLAKE_DATABASE="test-database"
export SNOWFLAKE_SCHEMA="test-schema"
export SNOWFLAKE_WAREHOUSE="COMPUTE_WH"
export SNOWFLAKE_ROLE="ACCOUNTADMIN"

echo "Environment variables set:"
echo "  SNOWFLAKE_ACCOUNT: $SNOWFLAKE_ACCOUNT"
echo "  SNOWFLAKE_USER: $SNOWFLAKE_USER"
echo "  SNOWFLAKE_DATABASE: $SNOWFLAKE_DATABASE"
echo "  SNOWFLAKE_SCHEMA: $SNOWFLAKE_SCHEMA"
echo "  SNOWFLAKE_WAREHOUSE: $SNOWFLAKE_WAREHOUSE"
echo "  SNOWFLAKE_ROLE: $SNOWFLAKE_ROLE"
echo ""

echo "=== Testing Prebuilt Configuration ==="
echo "This will attempt to initialize with the prebuilt Snowflake configuration:"
echo "Command: ./toolbox --prebuilt snowflake --stdio"
echo ""
echo "Expected result: Connection failure due to test credentials (this is normal)"
echo ""

# Test the prebuilt configuration (this will fail with test credentials, which is expected)
timeout 5s ./toolbox --prebuilt snowflake --stdio 2>&1 | head -5

echo ""
echo "=== Testing Custom Configuration ==="
echo "This will attempt to initialize with the custom Snowflake configuration:"
echo "Command: ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio"
echo ""
echo "Expected result: Connection failure due to test credentials (this is normal)"
echo ""

# Test the custom configuration (this will fail with test credentials, which is expected)
timeout 5s ./toolbox --tools-file docs/en/samples/snowflake/snowflake-config.yaml --stdio 2>&1 | head -5

echo ""
echo "=== Instructions for Real Usage ==="
echo "1. Copy docs/en/samples/snowflake/snowflake-env.sh to your own file"
echo "2. Edit it with your actual Snowflake credentials"
echo "3. Source the file: source your-snowflake-env.sh"
echo "4. Run: ./toolbox --prebuilt snowflake"
echo ""
echo "For more information, see docs/en/samples/snowflake"
@@ -1,6 +1,6 @@
{
  "name": "mcp-toolbox-for-databases",
  "version": "0.24.0",
  "version": "0.25.0",
  "description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
  "contextFileName": "MCP-TOOLBOX-EXTENSION.md"
}
go.mod (41 lines changed)
@@ -37,12 +37,14 @@ require (
    github.com/google/go-cmp v0.7.0
    github.com/google/uuid v1.6.0
    github.com/jackc/pgx/v5 v5.7.6
    github.com/jmoiron/sqlx v1.4.0
    github.com/looker-open-source/sdk-codegen/go v0.25.21
    github.com/microsoft/go-mssqldb v1.9.3
    github.com/nakagami/firebirdsql v0.9.15
    github.com/neo4j/neo4j-go-driver/v5 v5.28.4
    github.com/redis/go-redis/v9 v9.17.2
    github.com/sijms/go-ora/v2 v2.9.0
    github.com/snowflakedb/gosnowflake v1.18.1
    github.com/spf13/cobra v1.10.1
    github.com/thlib/go-timezone-local v0.0.7
    github.com/trinodb/trino-go-client v0.330.0
@@ -88,13 +90,40 @@ require (
|
||||
cloud.google.com/go/monitoring v1.24.3 // indirect
|
||||
cloud.google.com/go/trace v1.11.7 // indirect
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
|
||||
github.com/99designs/keyring v1.2.2 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect
|
||||
github.com/BurntSushi/toml v1.4.0 // indirect
|
||||
github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.10.3 // indirect
|
||||
github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
|
||||
github.com/ajg/form v1.5.1 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.4.0 // indirect
|
||||
github.com/apache/arrow/go/v15 v15.0.2 // indirect
|
||||
github.com/apache/thrift v0.22.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/config v1.31.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.12 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.88.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.29.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect
|
||||
github.com/aws/smithy-go v1.23.0 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
|
||||
github.com/couchbase/gocbcore/v10 v10.8.1 // indirect
|
||||
@@ -102,8 +131,10 @@ require (
|
||||
github.com/couchbase/goprotostellar v1.0.2 // indirect
|
||||
github.com/couchbase/tools-common/errors v1.0.0 // indirect
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect
|
||||
github.com/danieljoos/wincred v1.2.2 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 // indirect
|
||||
github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
@@ -115,7 +146,9 @@ require (
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
|
||||
github.com/godror/knownpb v0.3.0 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
@@ -127,6 +160,7 @@ require (
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
|
||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
@@ -140,19 +174,25 @@ require (
|
||||
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 // indirect
|
||||
github.com/klauspost/asmfmt v1.3.2 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/mtibben/percent v0.2.1 // indirect
|
||||
github.com/nakagami/chacha20 v0.1.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
@@ -182,6 +222,7 @@ require (
|
||||
golang.org/x/sync v0.18.0 // indirect
|
||||
golang.org/x/sys v0.38.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect
|
||||
golang.org/x/term v0.37.0 // indirect
|
||||
golang.org/x/text v0.31.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/tools v0.38.0 // indirect
|
||||
|
||||
40
go.sum
40
go.sum
@@ -643,6 +643,10 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
|
||||
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs=
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
|
||||
github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
|
||||
github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
@@ -653,11 +657,15 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZb
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 h1:u/LLAOFgsMv7HmNL4Qufg58y+qElGOt5qv0z1mURkRY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0/go.mod h1:2e8rMJtl2+2j+HXbTBwnyGpm5Nou7KhvSfxOq8JpTag=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
|
||||
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/ClickHouse/ch-go v0.68.0 h1:zd2VD8l2aVYnXFRyhTyKCrxvhSz1AaY4wBUXu/f0GiU=
|
||||
github.com/ClickHouse/ch-go v0.68.0/go.mod h1:C89Fsm7oyck9hr6rRo5gqqiVtaIY6AjdD0WFMyNRQ5s=
|
||||
@@ -701,6 +709,8 @@ github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUS
|
||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0=
|
||||
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14=
|
||||
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
|
||||
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
|
||||
github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE=
|
||||
@@ -708,6 +718,8 @@ github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+ye
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0 h1:Omnzb1Z/P90Dr2TbVNu54ICQL7TKVIIsJO231w484HU=
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0/go.mod h1:QH/asJjB3mHvY6Dot6ZKMMpTcOrWJ8i9GhsvG1g0PK4=
|
||||
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ=
|
||||
github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk=
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.0 h1:xm5WV/2L4emMRmMjHFykqiA4M/ra0DJVSWUkDyBjbg4=
|
||||
@@ -720,6 +732,8 @@ github.com/aws/aws-sdk-go-v2/credentials v1.18.12 h1:zmc9e1q90wMn8wQbjryy8IwA6Q4
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.12/go.mod h1:3VzdRDR5u3sSJRI4kYcOSIBbeYsgtVk7dG5R/U6qLWY=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7 h1:Is2tPmieqGS2edBnmOJIbdvOA6Op+rRpaYR60iBAwXM=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.7/go.mod h1:F1i5V5421EGci570yABvpIXgRIBPb5JM+lSkHF6Dq5w=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15 h1:7Zwtt/lP3KNRkeZre7soMELMGNoBrutx8nobg1jKWmo=
|
||||
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.15/go.mod h1:436h2adoHb57yd+8W+gYPrrA9U/R/SuAuOO42Ushzhw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7 h1:UCxq0X9O3xrlENdKf1r9eRJoKz/b0AfGkpp3a7FPlhg=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.7/go.mod h1:rHRoJUNUASj5Z/0eqI4w32vKvC7atoWR0jC+IkmVH8k=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.7 h1:Y6DTZUn7ZUC4th9FMBbo8LVE+1fyq3ofw+tRwkUd3PY=
|
||||
@@ -804,6 +818,8 @@ github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
|
||||
github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
@@ -822,6 +838,8 @@ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs=
|
||||
@@ -905,6 +923,7 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
|
||||
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
@@ -915,6 +934,8 @@ github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0=
|
||||
github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
|
||||
github.com/godror/godror v0.49.6 h1:ts4ZGw8uLJ42e1D7aXmVuSrld0/lzUzmIUjuUuQOgGM=
|
||||
github.com/godror/godror v0.49.6/go.mod h1:kTMcxZzRw73RT5kn9v3JkBK4kHI6dqowHotqV72ebU8=
|
||||
github.com/godror/knownpb v0.3.0 h1:+caUdy8hTtl7X05aPl3tdL540TvCcaQA6woZQroLZMw=
|
||||
@@ -1066,6 +1087,8 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4Zs
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU=
|
||||
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0=
|
||||
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
|
||||
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
@@ -1110,6 +1133,8 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6
|
||||
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
|
||||
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
||||
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
@@ -1121,6 +1146,7 @@ github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALr
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
|
||||
@@ -1144,6 +1170,8 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
|
||||
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
@@ -1156,9 +1184,13 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/microsoft/go-mssqldb v1.9.3 h1:hy4p+LDC8LIGvI3JATnLVmBOLMJbmn5X400mr5j0lPs=
|
||||
github.com/microsoft/go-mssqldb v1.9.3/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
@@ -1174,6 +1206,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
|
||||
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
|
||||
github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo=
|
||||
github.com/nakagami/chacha20 v0.1.0/go.mod h1:xpoujepNFA7MvYLvX5xKHzlOHimDrLI9Ll8zfOJ0l2E=
|
||||
github.com/nakagami/firebirdsql v0.9.15 h1:Mf05jaFI8+kjy6sBstsAu76zOkJ44AGd6cpApWNrp/0=
|
||||
@@ -1182,6 +1216,7 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/ulid/v2 v2.0.2 h1:r4fFzBm+bv0wNKNh5eXTwU7i85y5x+uwkxCUTNVQqLc=
|
||||
github.com/oklog/ulid/v2 v2.0.2/go.mod h1:mtBL0Qe/0HAx6/a4Z30qxVIAL1eQDweXq5lxOEiwQ68=
|
||||
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
|
||||
@@ -1247,6 +1282,8 @@ github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYo
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/snowflakedb/gosnowflake v1.18.1 h1:Nb4AWSnSBWe1UKpKTwCZxjYhYo1JH7GgKhO3wW1kR10=
|
||||
github.com/snowflakedb/gosnowflake v1.18.1/go.mod h1:7D4+cLepOWrerVsH+tevW3zdMJ5/WrEN7ZceAC6xBv0=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
|
||||
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
|
||||
@@ -1650,10 +1687,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@@ -2075,6 +2114,7 @@ google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aO
|
||||
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
|
||||
@@ -14,7 +14,11 @@
|
||||
|
||||
package embeddingmodels
|
||||
|
||||
import "context"
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// EmbeddingModelConfig is the interface for configuring embedding models.
|
||||
type EmbeddingModelConfig interface {
|
||||
@@ -27,3 +31,29 @@ type EmbeddingModel interface {
|
||||
ToConfig() EmbeddingModelConfig
|
||||
EmbedParameters(context.Context, []string) ([][]float32, error)
|
||||
}
|
||||
|
||||
type VectorFormatter func(vectorFloats []float32) any
|
||||
|
||||
// FormatVectorForPgvector converts a slice of floats into a PostgreSQL vector literal string: '[x, y, z]'
|
||||
func FormatVectorForPgvector(vectorFloats []float32) any {
|
||||
if len(vectorFloats) == 0 {
|
||||
return "[]"
|
||||
}
|
||||
|
||||
// Pre-allocate the builder.
|
||||
var b strings.Builder
|
||||
b.Grow(len(vectorFloats) * 10)
|
||||
|
||||
b.WriteByte('[')
|
||||
for i, f := range vectorFloats {
|
||||
if i > 0 {
|
||||
b.WriteString(", ")
|
||||
}
|
||||
b.Write(strconv.AppendFloat(nil, float64(f), 'g', -1, 32))
|
||||
}
|
||||
b.WriteByte(']')
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
var _ VectorFormatter = FormatVectorForPgvector
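For a sense of the output, here is a minimal same-package sketch (illustration only, assuming an "fmt" import in a test file) of what the formatter produces:

func ExampleFormatVectorForPgvector() {
	// The returned value is a pgvector literal string, suitable for binding
	// into a vector-typed column or casting with ::vector.
	fmt.Println(FormatVectorForPgvector([]float32{0.12, -0.5, 3}))
	fmt.Println(FormatVectorForPgvector(nil))
	// Output:
	// [0.12, -0.5, 3]
	// []
}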
|
||||
|
||||
@@ -49,6 +49,7 @@ var expectedToolSources = []string{
|
||||
"postgres",
|
||||
"serverless-spark",
|
||||
"singlestore",
|
||||
"snowflake",
|
||||
"spanner-postgres",
|
||||
"spanner",
|
||||
"sqlite",
|
||||
@@ -127,9 +128,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
sqlite_config, _ := Get("sqlite")
|
||||
neo4jconfig, _ := Get("neo4j")
|
||||
healthcare_config, _ := Get("cloud-healthcare")
|
||||
|
||||
snowflake_config, _ := Get("snowflake")
|
||||
if len(alloydb_admin_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
|
||||
}
|
||||
if len(alloydb_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
@@ -197,6 +198,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(singlestore_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch singlestore prebuilt tools yaml")
|
||||
}
|
||||
if len(snowflake_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
|
||||
}
|
||||
if len(spanner_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch spanner prebuilt tools yaml")
|
||||
}
|
||||
@@ -218,6 +222,9 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(healthcare_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch healthcare prebuilt tools yaml")
|
||||
}
|
||||
if len(snowflake_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailGetPrebuiltTool(t *testing.T) {
|
||||
|
||||
@@ -224,6 +224,10 @@ tools:
|
||||
kind: postgres-list-roles
|
||||
source: alloydb-pg-source
|
||||
|
||||
list_stored_procedure:
|
||||
kind: postgres-list-stored-procedure
|
||||
source: alloydb-pg-source
|
||||
|
||||
toolsets:
|
||||
alloydb_postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -254,3 +258,4 @@ toolsets:
|
||||
- list_database_stats
|
||||
- list_roles
|
||||
- list_table_stats
|
||||
- list_stored_procedure
|
||||
|
||||
@@ -18,6 +18,7 @@ sources:
|
||||
project: ${BIGQUERY_PROJECT}
|
||||
location: ${BIGQUERY_LOCATION:}
|
||||
useClientOAuth: ${BIGQUERY_USE_CLIENT_OAUTH:false}
|
||||
scopes: ${BIGQUERY_SCOPES:}
|
||||
|
||||
tools:
|
||||
analyze_contribution:
|
||||
|
||||
@@ -226,6 +226,10 @@ tools:
|
||||
kind: postgres-list-roles
|
||||
source: cloudsql-pg-source
|
||||
|
||||
list_stored_procedure:
|
||||
kind: postgres-list-stored-procedure
|
||||
source: cloudsql-pg-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -256,3 +260,4 @@ toolsets:
|
||||
- list_database_stats
|
||||
- list_roles
|
||||
- list_table_stats
|
||||
- list_stored_procedure
|
||||
|
||||
@@ -225,6 +225,10 @@ tools:
|
||||
kind: postgres-list-roles
|
||||
source: postgresql-source
|
||||
|
||||
list_stored_procedure:
|
||||
kind: postgres-list-stored-procedure
|
||||
source: postgresql-source
|
||||
|
||||
toolsets:
|
||||
postgres_database_tools:
|
||||
- execute_sql
|
||||
@@ -255,3 +259,4 @@ toolsets:
|
||||
- list_database_stats
|
||||
- list_roles
|
||||
- list_table_stats
|
||||
- list_stored_procedure
|
||||
|
||||
internal/prebuiltconfigs/tools/snowflake.yaml (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
sources:
|
||||
snowflake-source:
|
||||
kind: snowflake
|
||||
account: ${SNOWFLAKE_ACCOUNT}
|
||||
user: ${SNOWFLAKE_USER}
|
||||
password: ${SNOWFLAKE_PASSWORD}
|
||||
database: ${SNOWFLAKE_DATABASE}
|
||||
schema: ${SNOWFLAKE_SCHEMA}
|
||||
warehouse: ${SNOWFLAKE_WAREHOUSE}
|
||||
role: ${SNOWFLAKE_ROLE}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: snowflake-execute-sql
|
||||
source: snowflake-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: snowflake-sql
|
||||
source: snowflake-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, owner, comment) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in the specified database and schema."
|
||||
statement: |
|
||||
WITH
|
||||
input_param AS (
|
||||
SELECT ? AS param -- Single bind variable here
|
||||
)
|
||||
,
|
||||
all_tables_mode AS (
|
||||
SELECT COALESCE(TRIM(param), '') = '' AS is_all_tables
|
||||
FROM input_param
|
||||
) --SELECT * FROM all_tables_mode;
|
||||
,
|
||||
filtered_table_names AS (
|
||||
SELECT DISTINCT TRIM(LOWER(value)) AS table_name
|
||||
FROM input_param, all_tables_mode, TABLE(SPLIT_TO_TABLE(param, ','))
|
||||
WHERE NOT is_all_tables
|
||||
) -- SELECT * FROM filtered_table_names;
|
||||
,
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.TABLE_CATALOG,
|
||||
t.TABLE_SCHEMA,
|
||||
t.TABLE_NAME,
|
||||
t.TABLE_TYPE,
|
||||
t.TABLE_OWNER,
|
||||
t.COMMENT
|
||||
FROM
|
||||
all_tables_mode
|
||||
CROSS JOIN ${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLES T
|
||||
WHERE
|
||||
t.TABLE_TYPE = 'BASE TABLE'
|
||||
AND t.TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA')
|
||||
AND t.TABLE_SCHEMA = '${SNOWFLAKE_SCHEMA}'
|
||||
AND is_all_tables OR LOWER(T.TABLE_NAME) IN (SELECT table_name FROM filtered_table_names)
|
||||
) -- SELECT * FROM table_info;
|
||||
,
|
||||
columns_info AS (
|
||||
SELECT
|
||||
c.TABLE_CATALOG AS database_name,
|
||||
c.TABLE_SCHEMA AS schema_name,
|
||||
c.TABLE_NAME AS table_name,
|
||||
c.COLUMN_NAME AS column_name,
|
||||
c.DATA_TYPE AS data_type,
|
||||
c.ORDINAL_POSITION AS column_ordinal_position,
|
||||
c.IS_NULLABLE AS is_nullable,
|
||||
c.COLUMN_DEFAULT AS column_default,
|
||||
c.COMMENT AS column_comment
|
||||
FROM
|
||||
${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.COLUMNS c
|
||||
INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
|
||||
)
|
||||
,
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
tc.TABLE_CATALOG AS database_name,
|
||||
tc.TABLE_SCHEMA AS schema_name,
|
||||
tc.TABLE_NAME AS table_name,
|
||||
tc.CONSTRAINT_NAME AS constraint_name,
|
||||
tc.CONSTRAINT_TYPE AS constraint_type
|
||||
FROM
|
||||
${SNOWFLAKE_DATABASE}.INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
|
||||
INNER JOIN table_info USING (TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME)
|
||||
GROUP BY
|
||||
tc.TABLE_CATALOG, tc.TABLE_SCHEMA, tc.TABLE_NAME, tc.CONSTRAINT_NAME, tc.CONSTRAINT_TYPE
|
||||
)
|
||||
SELECT
|
||||
ti.TABLE_SCHEMA AS schema_name,
|
||||
ti.TABLE_NAME AS object_name,
|
||||
OBJECT_CONSTRUCT(
|
||||
'schema_name', ti.TABLE_SCHEMA,
|
||||
'object_name', ti.TABLE_NAME,
|
||||
'object_type', ti.TABLE_TYPE,
|
||||
'owner', ti.TABLE_OWNER,
|
||||
'comment', ti.COMMENT,
|
||||
'columns', COALESCE(
|
||||
(SELECT ARRAY_AGG(
|
||||
OBJECT_CONSTRUCT(
|
||||
'column_name', ci.column_name,
|
||||
'data_type', ci.data_type,
|
||||
'ordinal_position', ci.column_ordinal_position,
|
||||
'is_nullable', ci.is_nullable,
|
||||
'column_default', ci.column_default,
|
||||
'column_comment', ci.column_comment
|
||||
)
|
||||
) FROM columns_info ci WHERE ci.table_name = ti.TABLE_NAME AND ci.schema_name = ti.TABLE_SCHEMA),
|
||||
ARRAY_CONSTRUCT()
|
||||
),
|
||||
'constraints', COALESCE(
|
||||
(SELECT ARRAY_AGG(
|
||||
OBJECT_CONSTRUCT(
|
||||
'constraint_name', cons.constraint_name,
|
||||
'constraint_type', cons.constraint_type
|
||||
)
|
||||
) FROM constraints_info cons WHERE cons.table_name = ti.TABLE_NAME AND cons.schema_name = ti.TABLE_SCHEMA),
|
||||
ARRAY_CONSTRUCT()
|
||||
)
|
||||
) AS object_details
|
||||
FROM table_info ti
|
||||
ORDER BY ti.TABLE_SCHEMA, ti.TABLE_NAME;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in the specified database and schema will be listed."
|
||||
|
||||
toolsets:
|
||||
snowflake_tools:
|
||||
- execute_sql
|
||||
- list_tables
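Once a server is running with this config, the list_tables tool takes a single optional table_names parameter. Below is a hedged Go sketch of invoking it over HTTP; the port, the /api/tool/<name>/invoke path, and the table names are assumptions about a default local deployment, not part of the change:

package main

import (
	"io"
	"log"
	"net/http"
	"os"
	"strings"
)

func main() {
	// Assumption: Toolbox is listening locally on its default port and exposes
	// the standard tool-invoke endpoint; adjust the URL for your deployment.
	body := strings.NewReader(`{"table_names": "orders,customers"}`)
	resp, err := http.Post("http://127.0.0.1:5000/api/tool/list_tables/invoke", "application/json", body)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	// Prints the JSON built from the OBJECT_CONSTRUCT results of the statement above.
	if _, err := io.Copy(os.Stdout, resp.Body); err != nil {
		log.Fatal(err)
	}
}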
|
||||
@@ -246,6 +246,14 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
params, err = tool.EmbedParams(ctx, params, s.ResourceMgr.GetEmbeddingModelMap())
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest))
|
||||
return
|
||||
}
|
||||
|
||||
res, err := tool.Invoke(ctx, s.ResourceMgr, params, accessToken)
|
||||
|
||||
// Determine what error to return to the users.
|
||||
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
@@ -64,6 +65,10 @@ func (t MockTool) ParseParams(data map[string]any, claimsMap map[string]map[stri
|
||||
return parameters.ParseParams(t.Params, data, claimsMap)
|
||||
}
|
||||
|
||||
func (t MockTool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.Params, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
func (t MockTool) Manifest() tools.Manifest {
|
||||
pMs := make([]parameters.ParameterManifest, 0, len(t.Params))
|
||||
for _, p := range t.Params {
|
||||
|
||||
@@ -68,6 +68,8 @@ type ServerConfig struct {
|
||||
UI bool
|
||||
// Specifies a list of origins permitted to access this server.
|
||||
AllowedOrigins []string
|
||||
// Specifies a list of hosts permitted to access this server
|
||||
AllowedHosts []string
|
||||
}
|
||||
|
||||
type logFormat string
|
||||
|
||||
@@ -300,6 +300,21 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
|
||||
return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
|
||||
}
|
||||
|
||||
func hostCheck(allowedHosts map[string]struct{}) func(http.Handler) http.Handler {
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
_, hasWildcard := allowedHosts["*"]
|
||||
_, hostIsAllowed := allowedHosts[r.Host]
|
||||
if !hasWildcard && !hostIsAllowed {
|
||||
// Return 400 Bad Request or 403 Forbidden to block the attack
|
||||
http.Error(w, "Invalid Host header", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
}
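A hedged, same-package sketch (not part of the diff, assuming the standard testing, net/http, and net/http/httptest imports) of how this middleware behaves in isolation:

func TestHostCheckSketch(t *testing.T) {
	// Only api.example.com is allowed; any other Host header should be rejected.
	allowed := map[string]struct{}{"api.example.com": {}}
	h := hostCheck(allowed)(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))

	req := httptest.NewRequest(http.MethodGet, "http://evil.example.com/", nil)
	rec := httptest.NewRecorder()
	h.ServeHTTP(rec, req)

	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected 400 for disallowed host, got %d", rec.Code)
	}
}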
|
||||
|
||||
// NewServer returns a Server object based on provided Config.
|
||||
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
@@ -374,7 +389,7 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
||||
|
||||
// cors
|
||||
if slices.Contains(cfg.AllowedOrigins, "*") {
|
||||
s.logger.WarnContext(ctx, "wildcard (`*`) allows all origins to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use `--allowed-origins` flag to prevent DNS rebinding attacks")
|
||||
s.logger.WarnContext(ctx, "wildcard (`*`) allows all origins to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use `--allowed-origins` flag")
|
||||
}
|
||||
corsOpts := cors.Options{
|
||||
AllowedOrigins: cfg.AllowedOrigins,
|
||||
@@ -385,6 +400,15 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
||||
MaxAge: 300, // cache preflight results for 5 minutes
|
||||
}
|
||||
r.Use(cors.Handler(corsOpts))
|
||||
// validate hosts for DNS rebinding attacks
|
||||
if slices.Contains(cfg.AllowedHosts, "*") {
|
||||
s.logger.WarnContext(ctx, "wildcard (`*`) allows all hosts to access the resource and is not secure. Use it with caution for public, non-sensitive data, or during local development. Recommended to use `--allowed-hosts` flag to prevent DNS rebinding attacks")
|
||||
}
|
||||
allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
|
||||
for _, h := range cfg.AllowedHosts {
|
||||
allowedHostsMap[h] = struct{}{}
|
||||
}
|
||||
r.Use(hostCheck(allowedHostsMap))
|
||||
|
||||
// control plane
|
||||
apiR, err := apiRouter(s)
|
||||
|
||||
@@ -43,9 +43,10 @@ func TestServe(t *testing.T) {
|
||||
|
||||
addr, port := "127.0.0.1", 5000
|
||||
cfg := server.ServerConfig{
|
||||
Version: "0.0.0",
|
||||
Address: addr,
|
||||
Port: port,
|
||||
Version: "0.0.0",
|
||||
Address: addr,
|
||||
Port: port,
|
||||
AllowedHosts: []string{"*"},
|
||||
}
|
||||
|
||||
otelShutdown, err := telemetry.SetupOTel(ctx, "0.0.0", "", false, "toolbox")
|
||||
|
||||
@@ -43,6 +43,9 @@ import (
|
||||
|
||||
const SourceKind string = "bigquery"
|
||||
|
||||
// CloudPlatformScope is a broad scope for Google Cloud Platform services.
|
||||
const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
|
||||
|
||||
const (
|
||||
// No write operations are allowed.
|
||||
WriteModeBlocked string = "blocked"
|
||||
@@ -77,14 +80,42 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
|
||||
|
||||
type Config struct {
|
||||
// BigQuery configs
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Project string `yaml:"project" validate:"required"`
|
||||
Location string `yaml:"location"`
|
||||
WriteMode string `yaml:"writeMode"`
|
||||
AllowedDatasets []string `yaml:"allowedDatasets"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
ImpersonateServiceAccount string `yaml:"impersonateServiceAccount"`
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Project string `yaml:"project" validate:"required"`
|
||||
Location string `yaml:"location"`
|
||||
WriteMode string `yaml:"writeMode"`
|
||||
AllowedDatasets StringOrStringSlice `yaml:"allowedDatasets"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
ImpersonateServiceAccount string `yaml:"impersonateServiceAccount"`
|
||||
Scopes StringOrStringSlice `yaml:"scopes"`
|
||||
}
|
||||
|
||||
// StringOrStringSlice is a custom type that can unmarshal both a single string
|
||||
// (which it splits by comma) and a sequence of strings into a string slice.
|
||||
type StringOrStringSlice []string
|
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaler interface.
|
||||
func (s *StringOrStringSlice) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var v any
|
||||
if err := unmarshal(&v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch val := v.(type) {
|
||||
case string:
|
||||
*s = strings.Split(val, ",")
|
||||
return nil
|
||||
case []any:
|
||||
for _, item := range val {
|
||||
if str, ok := item.(string); ok {
|
||||
*s = append(*s, str)
|
||||
} else {
|
||||
return fmt.Errorf("element in sequence is not a string: %v", item)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("cannot unmarshal %T into StringOrStringSlice", v)
|
||||
}
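Both spellings decode to the same slice; a same-package sketch (illustration only, assuming fmt plus the goccy yaml import this file already uses):

func ExampleStringOrStringSlice() {
	var a, b struct {
		Scopes StringOrStringSlice `yaml:"scopes"`
	}
	// Comma-separated string form.
	_ = yaml.Unmarshal([]byte(`scopes: "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform"`), &a)
	// YAML sequence form.
	_ = yaml.Unmarshal([]byte("scopes:\n  - https://www.googleapis.com/auth/bigquery\n  - https://www.googleapis.com/auth/cloud-platform"), &b)
	fmt.Println(len(a.Scopes), len(b.Scopes))
	// Output: 2 2
}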
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
@@ -133,7 +164,7 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
|
||||
|
||||
} else {
|
||||
// Initializes a BigQuery Google SQL source
|
||||
client, restService, tokenSource, err = initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location, r.ImpersonateServiceAccount)
|
||||
client, restService, tokenSource, err = initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location, r.ImpersonateServiceAccount, r.Scopes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from ADC: %w", err)
|
||||
}
|
||||
@@ -396,19 +427,26 @@ func (s *Source) BigQueryTokenSource() oauth2.TokenSource {
|
||||
return s.TokenSource
|
||||
}
|
||||
|
||||
func (s *Source) BigQueryTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error) {
|
||||
func (s *Source) BigQueryTokenSourceWithScope(ctx context.Context, scopes []string) (oauth2.TokenSource, error) {
|
||||
if len(scopes) == 0 {
|
||||
scopes = s.Scopes
|
||||
if len(scopes) == 0 {
|
||||
scopes = []string{CloudPlatformScope}
|
||||
}
|
||||
}
|
||||
|
||||
if s.ImpersonateServiceAccount != "" {
|
||||
// Create impersonated credentials token source with the requested scope
|
||||
// Create impersonated credentials token source with the requested scopes
|
||||
ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
|
||||
TargetPrincipal: s.ImpersonateServiceAccount,
|
||||
Scopes: []string{scope},
|
||||
Scopes: scopes,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create impersonated credentials for %q with scope %q: %w", s.ImpersonateServiceAccount, scope, err)
|
||||
return nil, fmt.Errorf("failed to create impersonated credentials for %q with scopes %v: %w", s.ImpersonateServiceAccount, scopes, err)
|
||||
}
|
||||
return ts, nil
|
||||
}
|
||||
return google.DefaultTokenSource(ctx, scope)
|
||||
return google.DefaultTokenSource(ctx, scopes...)
|
||||
}
|
||||
|
||||
func (s *Source) GetMaxQueryResultRows() int {
|
||||
@@ -454,7 +492,7 @@ func (s *Source) lazyInitDataplexClient(ctx context.Context, tracer trace.Tracer
|
||||
|
||||
return func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
|
||||
once.Do(func() {
|
||||
c, cc, e := initDataplexConnection(ctx, tracer, s.Name, s.Project, s.UseClientOAuth, s.ImpersonateServiceAccount)
|
||||
c, cc, e := initDataplexConnection(ctx, tracer, s.Name, s.Project, s.UseClientOAuth, s.ImpersonateServiceAccount, s.Scopes)
|
||||
if e != nil {
|
||||
err = fmt.Errorf("failed to initialize dataplex client: %w", e)
|
||||
return
|
||||
@@ -620,6 +658,7 @@ func initBigQueryConnection(
|
||||
project string,
|
||||
location string,
|
||||
impersonateServiceAccount string,
|
||||
scopes []string,
|
||||
) (*bigqueryapi.Client, *bigqueryrestapi.Service, oauth2.TokenSource, error) {
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
@@ -632,12 +671,21 @@ func initBigQueryConnection(
|
||||
var tokenSource oauth2.TokenSource
|
||||
var opts []option.ClientOption
|
||||
|
||||
var credScopes []string
|
||||
if len(scopes) > 0 {
|
||||
credScopes = scopes
|
||||
} else if impersonateServiceAccount != "" {
|
||||
credScopes = []string{CloudPlatformScope}
|
||||
} else {
|
||||
credScopes = []string{bigqueryapi.Scope}
|
||||
}
|
||||
|
||||
if impersonateServiceAccount != "" {
|
||||
// Create impersonated credentials token source with cloud-platform scope
|
||||
// Create impersonated credentials token source
|
||||
// This broader scope is needed for tools like conversational analytics
|
||||
cloudPlatformTokenSource, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
|
||||
TargetPrincipal: impersonateServiceAccount,
|
||||
Scopes: []string{"https://www.googleapis.com/auth/cloud-platform"},
|
||||
Scopes: credScopes,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
|
||||
@@ -649,9 +697,9 @@ func initBigQueryConnection(
|
||||
}
|
||||
} else {
|
||||
// Use default credentials
|
||||
cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
|
||||
cred, err := google.FindDefaultCredentials(ctx, credScopes...)
|
||||
if err != nil {
|
||||
return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
|
||||
return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scopes %v: %w", credScopes, err)
|
||||
}
|
||||
tokenSource = cred.TokenSource
|
||||
opts = []option.ClientOption{
|
||||
@@ -742,6 +790,7 @@ func initDataplexConnection(
|
||||
project string,
|
||||
useClientOAuth bool,
|
||||
impersonateServiceAccount string,
|
||||
scopes []string,
|
||||
) (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
|
||||
var client *dataplexapi.CatalogClient
|
||||
var clientCreator DataplexClientCreator
|
||||
@@ -760,11 +809,16 @@ func initDataplexConnection(
|
||||
} else {
|
||||
var opts []option.ClientOption
|
||||
|
||||
credScopes := scopes
|
||||
if len(credScopes) == 0 {
|
||||
credScopes = []string{CloudPlatformScope}
|
||||
}
|
||||
|
||||
if impersonateServiceAccount != "" {
|
||||
// Create impersonated credentials token source
|
||||
ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
|
||||
TargetPrincipal: impersonateServiceAccount,
|
||||
Scopes: []string{"https://www.googleapis.com/auth/cloud-platform"},
|
||||
Scopes: credScopes,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
|
||||
@@ -775,7 +829,7 @@ func initDataplexConnection(
|
||||
}
|
||||
} else {
|
||||
// Use default credentials
|
||||
cred, err := google.FindDefaultCredentials(ctx)
|
||||
cred, err := google.FindDefaultCredentials(ctx, credScopes...)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
|
||||
}
|
||||
|
||||
@@ -132,6 +132,28 @@ func TestParseFromYamlBigQuery(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with custom scopes example",
|
||||
in: `
|
||||
sources:
|
||||
my-instance:
|
||||
kind: bigquery
|
||||
project: my-project
|
||||
location: us
|
||||
scopes:
|
||||
- https://www.googleapis.com/auth/bigquery
|
||||
- https://www.googleapis.com/auth/cloud-platform
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-instance": bigquery.Config{
|
||||
Name: "my-instance",
|
||||
Kind: bigquery.SourceKind,
|
||||
Project: "my-project",
|
||||
Location: "us",
|
||||
Scopes: []string{"https://www.googleapis.com/auth/bigquery", "https://www.googleapis.com/auth/cloud-platform"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
|
||||
internal/sources/snowflake/snowflake.go (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package snowflake
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/jmoiron/sqlx"
|
||||
_ "github.com/snowflakedb/gosnowflake"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
const SourceKind string = "snowflake"
|
||||
|
||||
// validate interface
|
||||
var _ sources.SourceConfig = Config{}
|
||||
|
||||
func init() {
|
||||
if !sources.Register(SourceKind, newConfig) {
|
||||
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Account string `yaml:"account" validate:"required"`
|
||||
User string `yaml:"user" validate:"required"`
|
||||
Password string `yaml:"password" validate:"required"`
|
||||
Database string `yaml:"database" validate:"required"`
|
||||
Schema string `yaml:"schema" validate:"required"`
|
||||
Warehouse string `yaml:"warehouse"`
|
||||
Role string `yaml:"role"`
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
db, err := initSnowflakeConnection(ctx, tracer, r.Name, r.Account, r.User, r.Password, r.Database, r.Schema, r.Warehouse, r.Role)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create connection: %w", err)
|
||||
}
|
||||
|
||||
err = db.PingContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to connect successfully: %w", err)
|
||||
}
|
||||
|
||||
s := &Source{
|
||||
Config: r,
|
||||
DB: db,
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
var _ sources.Source = &Source{}
|
||||
|
||||
type Source struct {
|
||||
Config
|
||||
DB *sqlx.DB
|
||||
}
|
||||
|
||||
func (s *Source) SourceKind() string {
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (s *Source) ToConfig() sources.SourceConfig {
|
||||
return s.Config
|
||||
}
|
||||
|
||||
func (s *Source) SnowflakeDB() *sqlx.DB {
|
||||
return s.DB
|
||||
}
|
||||
|
||||
func (s *Source) RunSQL(ctx context.Context, statement string, params []any) (any, error) {
|
||||
rows, err := s.DB.QueryxContext(ctx, statement, params...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to execute query: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var out []any
|
||||
for rows.Next() {
|
||||
cols, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get columns: %w", err)
|
||||
}
|
||||
|
||||
values := make([]interface{}, len(cols))
|
||||
valuePtrs := make([]interface{}, len(cols))
|
||||
for i := range values {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
return nil, fmt.Errorf("unable to scan row: %w", err)
|
||||
}
|
||||
|
||||
vMap := make(map[string]any)
|
||||
for i, col := range cols {
|
||||
vMap[col] = values[i]
|
||||
}
|
||||
out = append(out, vMap)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, fmt.Errorf("row iteration error: %w", err)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func initSnowflakeConnection(ctx context.Context, tracer trace.Tracer, name, account, user, password, database, schema, warehouse, role string) (*sqlx.DB, error) {
|
||||
//nolint:all // Reassigned ctx
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
// Set defaults for optional parameters
|
||||
if warehouse == "" {
|
||||
warehouse = "COMPUTE_WH"
|
||||
}
|
||||
if role == "" {
|
||||
role = "ACCOUNTADMIN"
|
||||
}
|
||||
|
||||
// Snowflake DSN format: user:password@account/database/schema?warehouse=warehouse&role=role
|
||||
dsn := fmt.Sprintf("%s:%s@%s/%s/%s?warehouse=%s&role=%s", user, password, account, database, schema, warehouse, role)
|
||||
db, err := sqlx.ConnectContext(ctx, "snowflake", dsn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create connection: %w", err)
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
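For reference, the DSN this helper assembles has the shape below (hedged sketch mirroring the Sprintf above; the account locator, credentials, and object names are made-up placeholders):

func ExampleSnowflakeDSNShape() {
	// Illustration only: placeholder values, defaults for warehouse and role.
	dsn := fmt.Sprintf("%s:%s@%s/%s/%s?warehouse=%s&role=%s",
		"svc_user", "secret", "xy12345.us-east-1", "test", "PUBLIC", "COMPUTE_WH", "ACCOUNTADMIN")
	fmt.Println(dsn)
	// Output: svc_user:secret@xy12345.us-east-1/test/PUBLIC?warehouse=COMPUTE_WH&role=ACCOUNTADMIN
}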
|
||||
internal/sources/snowflake/snowflake_test.go (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package snowflake_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/snowflake"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
)
|
||||
|
||||
func TestParseFromYamlSnowflake(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.SourceConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
sources:
|
||||
my-snowflake-instance:
|
||||
kind: snowflake
|
||||
account: my-account
|
||||
user: my_user
|
||||
password: my_pass
|
||||
database: my_db
|
||||
schema: my_schema
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-snowflake-instance": snowflake.Config{
|
||||
Name: "my-snowflake-instance",
|
||||
Kind: snowflake.SourceKind,
|
||||
Account: "my-account",
|
||||
User: "my_user",
|
||||
Password: "my_pass",
|
||||
Database: "my_db",
|
||||
Schema: "my_schema",
|
||||
Warehouse: "",
|
||||
Role: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if !cmp.Equal(tc.want, got.Sources) {
|
||||
t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestFailParseFromYaml(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
desc: "extra field",
|
||||
in: `
|
||||
sources:
|
||||
my-snowflake-instance:
|
||||
kind: snowflake
|
||||
account: my-account
|
||||
user: my_user
|
||||
password: my_pass
|
||||
database: my_db
|
||||
schema: my_schema
|
||||
foo: bar
|
||||
`,
|
||||
err: "unable to parse source \"my-snowflake-instance\" as \"snowflake\": [3:1] unknown field \"foo\"\n 1 | account: my-account\n 2 | database: my_db\n> 3 | foo: bar\n ^\n 4 | kind: snowflake\n 5 | password: my_pass\n 6 | schema: my_schema\n 7 | ",
|
||||
},
|
||||
{
|
||||
desc: "missing required field",
|
||||
in: `
|
||||
sources:
|
||||
my-snowflake-instance:
|
||||
kind: snowflake
|
||||
account: my-account
|
||||
user: my_user
|
||||
password: my_pass
|
||||
database: my_db
|
||||
`,
|
||||
err: "unable to parse source \"my-snowflake-instance\" as \"snowflake\": Key: 'Config.Schema' Error:Field validation for 'Schema' failed on the 'required' tag",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err == nil {
|
||||
t.Fatalf("expect parsing to fail")
|
||||
}
|
||||
errStr := err.Error()
|
||||
if errStr != tc.err {
|
||||
t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -16,14 +16,17 @@ package trino
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
_ "github.com/trinodb/trino-go-client/trino"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
trinogo "github.com/trinodb/trino-go-client/trino"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
@@ -47,18 +50,21 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user"`
|
||||
Password string `yaml:"password"`
|
||||
Catalog string `yaml:"catalog" validate:"required"`
|
||||
Schema string `yaml:"schema" validate:"required"`
|
||||
QueryTimeout string `yaml:"queryTimeout"`
|
||||
AccessToken string `yaml:"accessToken"`
|
||||
KerberosEnabled bool `yaml:"kerberosEnabled"`
|
||||
SSLEnabled bool `yaml:"sslEnabled"`
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user"`
|
||||
Password string `yaml:"password"`
|
||||
Catalog string `yaml:"catalog" validate:"required"`
|
||||
Schema string `yaml:"schema" validate:"required"`
|
||||
QueryTimeout string `yaml:"queryTimeout"`
|
||||
AccessToken string `yaml:"accessToken"`
|
||||
KerberosEnabled bool `yaml:"kerberosEnabled"`
|
||||
SSLEnabled bool `yaml:"sslEnabled"`
|
||||
SSLCertPath string `yaml:"sslCertPath"`
|
||||
SSLCert string `yaml:"sslCert"`
|
||||
DisableSslVerification bool `yaml:"disableSslVerification"`
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
@@ -66,7 +72,7 @@ func (r Config) SourceConfigKind() string {
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
pool, err := initTrinoConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Catalog, r.Schema, r.QueryTimeout, r.AccessToken, r.KerberosEnabled, r.SSLEnabled)
|
||||
pool, err := initTrinoConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Catalog, r.Schema, r.QueryTimeout, r.AccessToken, r.KerberosEnabled, r.SSLEnabled, r.SSLCertPath, r.SSLCert, r.DisableSslVerification)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create pool: %w", err)
|
||||
}
|
||||
@@ -152,17 +158,35 @@ func (s *Source) RunSQL(ctx context.Context, statement string, params []any) (an
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool) (*sql.DB, error) {
|
||||
func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool, sslCertPath, sslCert string, disableSslVerification bool) (*sql.DB, error) {
|
||||
//nolint:all // Reassigned ctx
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
// Build Trino DSN
|
||||
dsn, err := buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken, kerberosEnabled, sslEnabled)
|
||||
dsn, err := buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken, kerberosEnabled, sslEnabled, sslCertPath, sslCert)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to build DSN: %w", err)
|
||||
}
|
||||
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
|
||||
if disableSslVerification {
|
||||
logger.WarnContext(ctx, "SSL verification is disabled for trino source %s. This is an insecure setting and should not be used in production.\n", name)
|
||||
tr := &http.Transport{
|
||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
||||
}
|
||||
client := &http.Client{Transport: tr}
|
||||
clientName := fmt.Sprintf("insecure_trino_client_%s", name)
|
||||
if err := trinogo.RegisterCustomClient(clientName, client); err != nil {
|
||||
return nil, fmt.Errorf("failed to register custom client: %w", err)
|
||||
}
|
||||
dsn = fmt.Sprintf("%s&custom_client=%s", dsn, clientName)
|
||||
}
|
||||
|
||||
db, err := sql.Open("trino", dsn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open connection: %w", err)
|
||||
@@ -176,7 +200,7 @@ func initTrinoConnectionPool(ctx context.Context, tracer trace.Tracer, name, hos
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool) (string, error) {
|
||||
func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, accessToken string, kerberosEnabled, sslEnabled bool, sslCertPath, sslCert string) (string, error) {
|
||||
// Build query parameters
|
||||
query := url.Values{}
|
||||
query.Set("catalog", catalog)
|
||||
@@ -190,6 +214,12 @@ func buildTrinoDSN(host, port, user, password, catalog, schema, queryTimeout, ac
|
||||
if kerberosEnabled {
|
||||
query.Set("KerberosEnabled", "true")
|
||||
}
|
||||
if sslCertPath != "" {
|
||||
query.Set("sslCertPath", sslCertPath)
|
||||
}
|
||||
if sslCert != "" {
|
||||
query.Set("sslCert", sslCert)
|
||||
}
|
||||
|
||||
// Build URL
|
||||
scheme := "http"
|
||||
|
||||
@@ -36,6 +36,8 @@ func TestBuildTrinoDSN(t *testing.T) {
|
||||
accessToken string
|
||||
kerberosEnabled bool
|
||||
sslEnabled bool
|
||||
sslCertPath string
|
||||
sslCert string
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
@@ -49,6 +51,19 @@ func TestBuildTrinoDSN(t *testing.T) {
|
||||
want: "http://testuser@localhost:8080?catalog=hive&schema=default",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with SSL cert path and cert",
|
||||
host: "localhost",
|
||||
port: "8443",
|
||||
user: "testuser",
|
||||
catalog: "hive",
|
||||
schema: "default",
|
||||
sslEnabled: true,
|
||||
sslCertPath: "/path/to/cert.pem",
|
||||
sslCert: "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n",
|
||||
want: "https://testuser@localhost:8443?catalog=hive&schema=default&sslCert=-----BEGIN+CERTIFICATE-----%0A...%0A-----END+CERTIFICATE-----%0A&sslCertPath=%2Fpath%2Fto%2Fcert.pem",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with password",
|
||||
host: "localhost",
|
||||
@@ -117,7 +132,7 @@ func TestBuildTrinoDSN(t *testing.T) {
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := buildTrinoDSN(tt.host, tt.port, tt.user, tt.password, tt.catalog, tt.schema, tt.queryTimeout, tt.accessToken, tt.kerberosEnabled, tt.sslEnabled)
|
||||
got, err := buildTrinoDSN(tt.host, tt.port, tt.user, tt.password, tt.catalog, tt.schema, tt.queryTimeout, tt.accessToken, tt.kerberosEnabled, tt.sslEnabled, tt.sslCertPath, tt.sslCert)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("buildTrinoDSN() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
@@ -215,6 +230,41 @@ func TestParseFromYamlTrino(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "example with SSL cert path and cert",
|
||||
in: `
|
||||
sources:
|
||||
my-trino-ssl-cert:
|
||||
kind: trino
|
||||
host: localhost
|
||||
port: "8443"
|
||||
user: testuser
|
||||
catalog: hive
|
||||
schema: default
|
||||
sslEnabled: true
|
||||
sslCertPath: /path/to/cert.pem
|
||||
sslCert: |-
|
||||
-----BEGIN CERTIFICATE-----
|
||||
...
|
||||
-----END CERTIFICATE-----
|
||||
disableSslVerification: true
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-trino-ssl-cert": Config{
|
||||
Name: "my-trino-ssl-cert",
|
||||
Kind: SourceKind,
|
||||
Host: "localhost",
|
||||
Port: "8443",
|
||||
User: "testuser",
|
||||
Catalog: "hive",
|
||||
Schema: "default",
|
||||
SSLEnabled: true,
|
||||
SSLCertPath: "/path/to/cert.pem",
|
||||
SSLCert: "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
|
||||
DisableSslVerification: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -166,6 +167,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -172,6 +173,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -177,6 +178,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -148,6 +149,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -152,6 +153,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
@@ -152,6 +153,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
    "fmt"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -142,6 +143,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
    return t.manifest

@@ -19,6 +19,7 @@ import (
    "fmt"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -147,6 +148,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
    return t.manifest

@@ -19,6 +19,7 @@ import (
    "fmt"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -147,6 +148,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
    return t.manifest

@@ -21,6 +21,7 @@ import (
    "time"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -271,6 +272,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
    return t.manifest

@@ -20,6 +20,7 @@ import (
    "strings"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -76,26 +77,21 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
        placeholderParts = append(placeholderParts, fmt.Sprintf("$%d", i+3)) // $1, $2 reserved
    }

    var paramNamesSQL string
    var paramValuesSQL string

    var stmt string
    if numParams > 0 {
        paramNamesSQL = fmt.Sprintf("ARRAY[%s]", strings.Join(quotedNameParts, ", "))
        paramValuesSQL = fmt.Sprintf("ARRAY[%s]", strings.Join(placeholderParts, ", "))
    paramNamesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(quotedNameParts, ", "))
    paramValuesSQL := fmt.Sprintf("ARRAY[%s]", strings.Join(placeholderParts, ", "))
        // execute_nl_query is the AlloyDB AI function that executes the natural language query
        // The first parameter is the natural language query, which is passed as $1
        // The second parameter is the NLConfig, which is passed as a $2
        // The following params are the list of PSV values passed to the NLConfig
        // Example SQL statement being executed:
        // SELECT alloydb_ai_nl.execute_nl_query(nl_question => 'How many tickets do I have?', nl_config_id => 'cymbal_air_nl_config', param_names => ARRAY ['user_email'], param_values => ARRAY ['hailongli@google.com']);
        stmtFormat := "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => %s, param_values => %s);"
        stmt = fmt.Sprintf(stmtFormat, paramNamesSQL, paramValuesSQL)
    } else {
        paramNamesSQL = "ARRAY[]::TEXT[]"
        paramValuesSQL = "ARRAY[]::TEXT[]"
        stmt = "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2);"
    }

    // execute_nl_query is the AlloyDB AI function that executes the natural language query
    // The first parameter is the natural language query, which is passed as $1
    // The second parameter is the NLConfig, which is passed as a $2
    // The following params are the list of PSV values passed to the NLConfig
    // Example SQL statement being executed:
    // SELECT alloydb_ai_nl.execute_nl_query(nl_question => 'How many tickets do I have?', nl_config_id => 'cymbal_air_nl_config', param_names => ARRAY ['user_email'], param_values => ARRAY ['hailongli@google.com']);
    stmtFormat := "SELECT alloydb_ai_nl.execute_nl_query(nl_question => $1, nl_config_id => $2, param_names => %s, param_values => %s);"
    stmt := fmt.Sprintf(stmtFormat, paramNamesSQL, paramValuesSQL)

    newQuestionParam := parameters.NewStringParameter(
        "question", // name
        "The natural language question to ask.", // description
@@ -156,6 +152,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -22,6 +22,7 @@ import (
    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/google/uuid"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
    "github.com/googleapis/genai-toolbox/internal/tools"
@@ -311,6 +312,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -25,6 +25,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util"
@@ -55,7 +56,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T

type compatibleSource interface {
    BigQueryClient() *bigqueryapi.Client
    BigQueryTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error)
    BigQueryTokenSourceWithScope(ctx context.Context, scopes []string) (oauth2.TokenSource, error)
    BigQueryProject() string
    BigQueryLocation() string
    GetMaxQueryResultRows() int
@@ -190,10 +191,10 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
            return nil, fmt.Errorf("error parsing access token: %w", err)
        }
    } else {
        // Get cloud-platform token source for Gemini Data Analytics API during initialization
        tokenSource, err := source.BigQueryTokenSourceWithScope(ctx, "https://www.googleapis.com/auth/cloud-platform")
        // Get a token source for the Gemini Data Analytics API.
        tokenSource, err := source.BigQueryTokenSourceWithScope(ctx, nil)
        if err != nil {
            return nil, fmt.Errorf("failed to get cloud-platform token source: %w", err)
            return nil, fmt.Errorf("failed to get token source: %w", err)
        }

        // Use cloud-platform token source for Gemini Data Analytics API
@@ -267,6 +268,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

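Note on the hunk above: BigQueryTokenSourceWithScope now takes a slice of scopes instead of a single scope string, and the Gemini Data Analytics path passes nil, presumably falling back to the source's default scopes. A minimal sketch of a caller against the new interface shape, assuming a `source` value satisfying compatibleSource and a `ctx` from the surrounding Invoke call (the explicit scope list is only an illustration):

    // nil asks the source for its default token source.
    tokenSource, err := source.BigQueryTokenSourceWithScope(ctx, nil)
    if err != nil {
        return nil, fmt.Errorf("failed to get token source: %w", err)
    }
    // A caller can still request specific scopes by passing a slice.
    scopedSource, err := source.BigQueryTokenSourceWithScope(ctx, []string{"https://www.googleapis.com/auth/cloud-platform"})
    if err != nil {
        return nil, fmt.Errorf("failed to get token source: %w", err)
    }
    _, _ = tokenSource, scopedSource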
@@ -22,6 +22,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
    "github.com/googleapis/genai-toolbox/internal/tools"
@@ -284,6 +285,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -21,6 +21,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
    "github.com/googleapis/genai-toolbox/internal/tools"
@@ -274,6 +275,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
@@ -160,6 +161,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
@@ -170,6 +171,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -166,6 +167,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
@@ -177,6 +178,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -22,6 +22,7 @@ import (
    dataplexapi "cloud.google.com/go/dataplex/apiv1"
    dataplexpb "cloud.google.com/go/dataplex/apiv1/dataplexpb"
    "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
    "github.com/googleapis/genai-toolbox/internal/tools"
@@ -262,6 +263,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.Parameters, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.Parameters, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    // Returns the tool manifest
    return t.manifest

@@ -22,6 +22,7 @@ import (

    bigqueryapi "cloud.google.com/go/bigquery"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
    "github.com/googleapis/genai-toolbox/internal/tools"
@@ -216,6 +217,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    "cloud.google.com/go/bigtable"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -119,6 +120,10 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

@@ -20,6 +20,7 @@ import (

    gocql "github.com/apache/cassandra-gocql-driver/v2"
    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
@@ -140,6 +141,12 @@ func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any)
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

var _ tools.Tool = Tool{}

func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
    return "Authorization", nil
}

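Each tool file in this diff gains the same EmbedParams method, delegating to the shared helper in internal/util/parameters and pulling the embedding models from a map keyed by name. The recurring shape, consolidated from the hunks above as a sketch (whether the tool passes t.AllParams or t.Parameters varies by file; the final nil argument is reproduced as it appears in the diff, its meaning is not spelled out here):

    func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
        // Delegate embedding of the already-parsed parameter values to the shared
        // helper, scoped to this tool's own parameter definitions.
        return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
    }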
Some files were not shown because too many files have changed in this diff.