Compare commits
57 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ff6ddaf73b | ||
|
|
9c045253bd | ||
|
|
3b1cce72e7 | ||
|
|
4f725dc85e | ||
|
|
0faf75db95 | ||
|
|
5e08aeb7ed | ||
|
|
1cac9b5b37 | ||
|
|
d91bdfcbdc | ||
|
|
c60a9601b4 | ||
|
|
bff528093d | ||
|
|
34f78bd89d | ||
|
|
000d6ada38 | ||
|
|
a09f628b52 | ||
|
|
80a8ebfa0b | ||
|
|
0588e178d6 | ||
|
|
f80f18aaf6 | ||
|
|
f79cdd6144 | ||
|
|
c3a58e1d16 | ||
|
|
c7b443d94a | ||
|
|
af3d791dea | ||
|
|
a00880aeda | ||
|
|
b72a4b9855 | ||
|
|
0527532bd7 | ||
|
|
e6e1545a97 | ||
|
|
48f49c5e9a | ||
|
|
6ab871ea93 | ||
|
|
8d0fa6783a | ||
|
|
8da5a8f68d | ||
|
|
ecf9d65e8a | ||
|
|
8749b03003 | ||
|
|
bfabcf826e | ||
|
|
78ec2bb52e | ||
|
|
e843f73079 | ||
|
|
6eaf36ac85 | ||
|
|
051e686476 | ||
|
|
9af55b651d | ||
|
|
5fa1660fc8 | ||
|
|
d9ee17d2c7 | ||
|
|
da5e130819 | ||
|
|
ce831a47c3 | ||
|
|
a0c4483a47 | ||
|
|
7aae8d50b3 | ||
|
|
b41d512759 | ||
|
|
898403f753 | ||
|
|
38db9bd62b | ||
|
|
376c3bec92 | ||
|
|
8190c92818 | ||
|
|
064adf2c79 | ||
|
|
8fc63c3c73 | ||
|
|
8a9344ac06 | ||
|
|
780e67fd22 | ||
|
|
54dddb772b | ||
|
|
20e2c1703f | ||
|
|
9f545ce9c4 | ||
|
|
3ef18e1713 | ||
|
|
0615e3a4e5 | ||
|
|
4ca4b96223 |
@@ -62,7 +62,6 @@ steps:
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
|
||||
- id: "alloydb-pg"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -121,8 +120,7 @@ steps:
|
||||
- "BIGTABLE_PROJECT=$PROJECT_ID"
|
||||
- "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv:
|
||||
["CLIENT_ID"]
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
@@ -153,7 +151,7 @@ steps:
|
||||
"BigQuery" \
|
||||
bigquery \
|
||||
bigquery
|
||||
|
||||
|
||||
- id: "dataplex"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -274,8 +272,7 @@ steps:
|
||||
- "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
|
||||
- "CLOUD_SQL_MYSQL_REGION=$_REGION"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv:
|
||||
["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
|
||||
secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
@@ -387,7 +384,7 @@ steps:
|
||||
sqlite
|
||||
|
||||
- id: "couchbase"
|
||||
name : golang:1
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -395,7 +392,8 @@ steps:
|
||||
- "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
|
||||
- "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
|
||||
secretEnv:
|
||||
["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
@@ -408,7 +406,7 @@ steps:
|
||||
couchbase
|
||||
|
||||
- id: "redis"
|
||||
name : golang:1
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -425,9 +423,9 @@ steps:
|
||||
"Redis" \
|
||||
redis \
|
||||
redis
|
||||
|
||||
|
||||
- id: "valkey"
|
||||
name : golang:1
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
@@ -475,7 +473,13 @@ steps:
|
||||
- "FIRESTORE_PROJECT=$PROJECT_ID"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
- "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
|
||||
secretEnv: ["CLIENT_ID", "LOOKER_BASE_URL", "LOOKER_CLIENT_ID", "LOOKER_CLIENT_SECRET"]
|
||||
secretEnv:
|
||||
[
|
||||
"CLIENT_ID",
|
||||
"LOOKER_BASE_URL",
|
||||
"LOOKER_CLIENT_ID",
|
||||
"LOOKER_CLIENT_SECRET",
|
||||
]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
@@ -486,26 +490,6 @@ steps:
|
||||
"Looker" \
|
||||
looker \
|
||||
looker
|
||||
|
||||
- id: "duckdb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"DuckDB" \
|
||||
duckdb \
|
||||
duckdb
|
||||
|
||||
|
||||
- id: "alloydbwaitforoperation"
|
||||
name: golang:1
|
||||
@@ -525,7 +509,48 @@ steps:
|
||||
"Alloydb Wait for Operation" \
|
||||
utility \
|
||||
utility/alloydbwaitforoperation
|
||||
|
||||
|
||||
- id: "tidb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "TIDB_DATABASE=$_DATABASE_NAME"
|
||||
- "TIDB_HOST=$_TIDB_HOST"
|
||||
- "TIDB_PORT=$_TIDB_PORT"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID", "TIDB_USER", "TIDB_PASS"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"TiDB" \
|
||||
tidb \
|
||||
tidbsql tidbexecutesql
|
||||
|
||||
- id: "kuzu"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Kuzu" \
|
||||
kuzu \
|
||||
kuzu
|
||||
|
||||
availableSecrets:
|
||||
secretManager:
|
||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
||||
@@ -584,7 +609,10 @@ availableSecrets:
|
||||
env: LOOKER_CLIENT_ID
|
||||
- versionName: projects/107716898620/secrets/looker_client_secret/versions/latest
|
||||
env: LOOKER_CLIENT_SECRET
|
||||
|
||||
- versionName: projects/107716898620/secrets/tidb_user/versions/latest
|
||||
env: TIDB_USER
|
||||
- versionName: projects/107716898620/secrets/tidb_pass/versions/latest
|
||||
env: TIDB_PASS
|
||||
|
||||
options:
|
||||
logging: CLOUD_LOGGING_ONLY
|
||||
@@ -616,4 +644,6 @@ substitutions:
|
||||
_DGRAPHURL: "https://play.dgraph.io"
|
||||
_COUCHBASE_BUCKET: "couchbase-bucket"
|
||||
_COUCHBASE_SCOPE: "couchbase-scope"
|
||||
_LOOKER_VERIFY_SSL: "true"
|
||||
_LOOKER_VERIFY_SSL: "true"
|
||||
_TIDB_HOST: 127.0.0.1
|
||||
_TIDB_PORT: "4000"
|
||||
|
||||
3
.github/release-please.yml
vendored
@@ -24,6 +24,7 @@ extraFiles: [
|
||||
"docs/en/getting-started/local_quickstart_js.md",
|
||||
"docs/en/getting-started/local_quickstart_go.md",
|
||||
"docs/en/getting-started/mcp_quickstart/_index.md",
|
||||
"docs/en/samples/alloydb/_index.md",
|
||||
"docs/en/samples/bigquery/local_quickstart.md",
|
||||
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
|
||||
"docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
|
||||
@@ -37,6 +38,8 @@ extraFiles: [
|
||||
"docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
|
||||
"docs/en/how-to/connect-ide/firestore_mcp.md",
|
||||
"docs/en/how-to/connect-ide/looker_mcp.md",
|
||||
"docs/en/how-to/connect-ide/mysql_mcp.md",
|
||||
"docs/en/how-to/connect-ide/mssql_mcp.md",
|
||||
"docs/en/how-to/connect-ide/postgres_mcp.md",
|
||||
"docs/en/how-to/connect-ide/spanner_mcp.md",
|
||||
]
|
||||
|
||||
2
.github/workflows/schedule_reporter.yml
vendored
@@ -26,4 +26,4 @@ jobs:
|
||||
contents: 'read'
|
||||
uses: ./.github/workflows/cloud_build_failure_reporter.yml
|
||||
with:
|
||||
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge"
|
||||
trigger_names: "toolbox-test-nightly,toolbox-test-on-merge,toolbox-continuous-release"
|
||||
|
||||
20
CHANGELOG.md
@@ -1,5 +1,25 @@
|
||||
# Changelog
|
||||
|
||||
## [0.11.0](https://github.com/googleapis/genai-toolbox/compare/v0.11.0...v0.11.0) (2025-08-05)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **tools/bigquery-sql:** Ensure invoke always returns a non-null value ([#1020](https://github.com/googleapis/genai-toolbox/issues/1020)) ([9af55b6](https://github.com/googleapis/genai-toolbox/commit/9af55b651d836f268eda342ea27380e7c9967c94))
|
||||
* **tools/bigquery-execute-sql:** Update the return messages ([#1034](https://github.com/googleapis/genai-toolbox/issues/1034)) ([051e686](https://github.com/googleapis/genai-toolbox/commit/051e686476a781ca49f7617764d507916a1188b8))
|
||||
|
||||
### Features
|
||||
|
||||
* Add TiDB source and tool ([#829](https://github.com/googleapis/genai-toolbox/issues/829)) ([6eaf36a](https://github.com/googleapis/genai-toolbox/commit/6eaf36ac8505d523fa4f5a4ac3c97209fd688cef))
|
||||
* Interactive web UI for Toolbox ([#1065](https://github.com/googleapis/genai-toolbox/issues/1065)) ([8749b03](https://github.com/googleapis/genai-toolbox/commit/8749b030035e65361047c4ead13dfacb8e9a9b59))
|
||||
* **prebuiltconfigs/cloud-sql-postgres:** Introduce additional parameter to limit context in list tables ([#1062](https://github.com/googleapis/genai-toolbox/issues/1062)) ([c3a58e1](https://github.com/googleapis/genai-toolbox/commit/c3a58e1d1678dc14d8de5006511df597fd75faa3))
|
||||
* **tools/looker-query-url:** Add support for `looker-query-url` tool ([#1015](https://github.com/googleapis/genai-toolbox/issues/1015)) ([327ddf0](https://github.com/googleapis/genai-toolbox/commit/327ddf0439058aa5ecd2c7ae8251fcde6aeff18c))
|
||||
* **tools/dataplex-lookup-entry:** Add support for `dataplex-lookup-entry` tool ([#1009](https://github.com/googleapis/genai-toolbox/issues/1009)) ([5fa1660](https://github.com/googleapis/genai-toolbox/commit/5fa1660fc8631989b4d13abea205b6426bb506a5))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **tools/bigquery,mssql,mysql,postgres,spanner,tidb:** Add query logging to execute-sql tools ([#1069](https://github.com/googleapis/genai-toolbox/issues/1069)) ([0527532]([https://github.com/googleapis/genai-toolbox/commit/0527532bd7085ef9eb8f9c30f430a2f2f35cef32))
|
||||
|
||||
## [0.10.0](https://github.com/googleapis/genai-toolbox/compare/v0.9.0...v0.10.0) (2025-07-25)
|
||||
|
||||
|
||||
|
||||
@@ -114,7 +114,7 @@ To install Toolbox as a binary:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.10.0
|
||||
export VERSION=0.11.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -127,7 +127,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.10.0
|
||||
export VERSION=0.11.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -151,7 +151,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.10.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.11.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
20
cmd/BUILD
@@ -1,20 +0,0 @@
|
||||
load("//tools/build_defs/go:go_library.bzl", "go_library")
|
||||
load("//tools/build_defs/go:go_test.bzl", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "cmd",
|
||||
srcs = [
|
||||
"options.go",
|
||||
"root.go",
|
||||
],
|
||||
embedsrcs = ["version.txt"],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "cmd_test",
|
||||
srcs = [
|
||||
"options_test.go",
|
||||
"root_test.go",
|
||||
],
|
||||
library = ":cmd",
|
||||
)
|
||||
12
cmd/root.go
@@ -51,9 +51,9 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/duckdbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
@@ -61,6 +61,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/kuzu/kuzucypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
|
||||
@@ -94,6 +95,8 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
@@ -109,9 +112,9 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/duckdb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/kuzu"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||
@@ -121,6 +124,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||
)
|
||||
|
||||
@@ -222,6 +226,7 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres-admin', alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'dataplex', 'firestore', 'looker', 'mssql', 'mysql', 'postgres', 'spanner', 'spanner-postgres'.")
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||
|
||||
// wrap RunE command so that we have access to original Command object
|
||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||
@@ -798,6 +803,9 @@ func run(cmd *Command) error {
|
||||
return errMsg
|
||||
}
|
||||
cmd.logger.InfoContext(ctx, "Server ready to serve!")
|
||||
if cmd.cfg.UI {
|
||||
cmd.logger.InfoContext(ctx, "Toolbox UI is up and running at: http://localhost:5000/ui")
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer close(srvErr)
|
||||
|
||||
@@ -1250,7 +1250,7 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"dataplex-tools": tools.ToolsetConfig{
|
||||
Name: "dataplex-tools",
|
||||
ToolNames: []string{"dataplex_search_entries"},
|
||||
ToolNames: []string{"dataplex_search_entries", "dataplex_lookup_entry"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.10.0
|
||||
0.11.0
|
||||
|
||||
@@ -234,7 +234,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.10.0\" # x-release-please-version\n",
|
||||
"version = \"0.11.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -86,7 +86,7 @@ To install Toolbox as a binary:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.10.0
|
||||
export VERSION=0.11.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.10.0
|
||||
export VERSION=0.11.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -115,7 +115,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.10.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.11.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
@@ -136,6 +136,15 @@ Toolbox enables dynamic reloading by default. To disable, use the
|
||||
`--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
#### Launching Toolbox UI
|
||||
|
||||
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets
|
||||
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/use-toolbox-ui/index.md).
|
||||
|
||||
```sh
|
||||
./toolbox --ui
|
||||
```
|
||||
|
||||
#### Homebrew Users
|
||||
|
||||
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:
|
||||
|
||||
|
Before Width: | Height: | Size: 154 KiB After Width: | Height: | Size: 76 KiB |
@@ -137,7 +137,7 @@ postgres` and a password next time.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
@@ -171,7 +171,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -167,7 +167,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -133,7 +133,7 @@ postgres` and a password next time.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
@@ -167,7 +167,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -466,14 +466,14 @@ async function run() {
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
conversationHistory.push(...toolResponses);
|
||||
|
||||
|
||||
// Call the AI again with the tool results.
|
||||
response = await ai.generate({ messages: conversationHistory, tools });
|
||||
conversationHistory.push(response.message);
|
||||
}
|
||||
|
||||
|
||||
console.log(response.text);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -6,337 +6,9 @@ description: >
|
||||
Create your AlloyDB database with MCP Toolbox.
|
||||
---
|
||||
|
||||
This guide covers how to use [MCP Toolbox for Databases][toolbox] to create
|
||||
AlloyDB clusters and instances from IDE enabling their E2E journey.
|
||||
|
||||
- [Cursor][cursor]
|
||||
- [Windsurf][windsurf] (Codium)
|
||||
- [Visual Studio Code][vscode] (Copilot)
|
||||
- [Cline][cline] (VS Code extension)
|
||||
- [Claude desktop][claudedesktop]
|
||||
- [Claude code][claudecode]
|
||||
- [Gemini CLI][geminicli]
|
||||
- [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Before you begin
|
||||
|
||||
1. In the Google Cloud console, on the [project selector
|
||||
page](https://console.cloud.google.com/projectselector2/home/dashboard),
|
||||
select or create a Google Cloud project.
|
||||
|
||||
1. [Make sure that billing is enabled for your Google Cloud
|
||||
project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
V0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth
|
||||
print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini
|
||||
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code
|
||||
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
|
||||
extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.
|
||||
|
||||
> **Note:** The lifetime of token is 1 hour.
|
||||
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"alloydb-admin": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
|
||||
"env": {
|
||||
"API_KEY": "your-api-key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to AlloyDB using MCP. Try asking your AI assistant
|
||||
to create a database, cluster or instance.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **alloydb-create-cluster**: creates alloydb cluster
|
||||
1. **alloydb-create-instance**: creates alloydb instance (PRIMARY, READ_POOL or SECONDARY)
|
||||
1. **alloydb-get-operation**: polls on operations API until the operation is done.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
|
||||
## Connect to your Data
|
||||
|
||||
After setting up an AlloyDB cluster and instance, you can [connect your IDE to
|
||||
the
|
||||
database](https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox).
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
@@ -7,7 +7,7 @@ description: >
|
||||
---
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox"/>
|
||||
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/connect-ide-using-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
|
||||
@@ -62,19 +62,19 @@ to expose your developer assistant tools to a Firestore instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -46,19 +46,19 @@ to expose your developer assistant tools to a Looker instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
289
docs/en/how-to/connect-ide/mssql_mcp.md
Normal file
@@ -0,0 +1,289 @@
|
||||
---
|
||||
title: SQL Server using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to SQL Server using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQL Server. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQL Server instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a SQL Server instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"cloud-sql-sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","cloud-sql-mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to SQL Server using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
287
docs/en/how-to/connect-ide/mysql_mcp.md
Normal file
@@ -0,0 +1,287 @@
|
||||
---
|
||||
title: MySQL using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to MySQL using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a MySQL instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a MySQL instance.](https://dev.mysql.com/downloads/installer/)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt", "mysql", "--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
"env": {
|
||||
"MYSQL_HOST": "",
|
||||
"MYSQL_PORT": "",
|
||||
"MYSQL_DATABASE": "",
|
||||
"MYSQL_USER": "",
|
||||
"MYSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -52,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
BIN
docs/en/how-to/use-toolbox-ui/edit-headers.gif
Normal file
|
After Width: | Height: | Size: 36 MiB |
BIN
docs/en/how-to/use-toolbox-ui/edit-headers.png
Normal file
|
After Width: | Height: | Size: 298 KiB |
106
docs/en/how-to/use-toolbox-ui/index.md
Normal file
@@ -0,0 +1,106 @@
|
||||
---
|
||||
title: "Toolbox UI"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to effectively use Toolbox UI.
|
||||
---
|
||||
|
||||
Toolbox UI is a built-in web interface that allows users to visually inspect and test out configured resources such as tools and toolsets.
|
||||
|
||||
## Launching Toolbox UI
|
||||
|
||||
To launch Toolbox's interactive UI, use the `--ui` flag.
|
||||
|
||||
```sh
|
||||
./toolbox --ui
|
||||
```
|
||||
|
||||
Toolbox UI will be served from the same host and port as the Toolbox Server, with the `/ui` suffix. Once Toolbox
is launched, the following INFO log with Toolbox UI's URL will be shown:
|
||||
|
||||
```bash
|
||||
INFO "Toolbox UI is up and running at: http://localhost:5000/ui"
|
||||
```
|
||||
|
||||
## Navigating the Tools Page
|
||||
|
||||
The tools page shows all tools loaded from your configuration file. This corresponds to the default toolset (represented by an empty string). Each tool's name on this page will exactly match its name in the configuration file.
|
||||
|
||||
To view details for a specific tool, click on the tool name. The main content area will be populated
|
||||
with the tool name, description, and available parameters.
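
For context, every entry on this page corresponds to a tool defined in the configuration file passed to Toolbox. A minimal sketch, with hypothetical source and tool names, might look like:

```yaml
sources:
  my-pg-source:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: my_db
    user: ${USER_NAME}
    password: ${PASSWORD}
tools:
  search_users:
    kind: postgres-sql
    source: my-pg-source
    description: Search users by name.
    statement: SELECT * FROM users WHERE name LIKE $1;
    parameters:
      - name: name
        type: string
        description: The name to search for.
```

With this configuration, `search_users` appears in the tool list, and clicking it shows its description and its `name` parameter.
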

|
||||
|
||||
### Invoking a Tool
|
||||
|
||||
1. Click on a Tool
|
||||
2. Enter appropriate parameters in each parameter field
|
||||
3. Click "Run Tool"
|
||||
4. Done! Your results will appear in the response field
|
||||
5. (Optional) Uncheck "Prettify JSON" to format the response as plain text
|
||||
|
||||

|
||||
|
||||
### Optional Parameters
|
||||
|
||||
Toolbox allows users to add [optional parameters](../../resources/tools/#basic-parameters) with or without a default value.
|
||||
|
||||
To exclude a parameter, uncheck the box to the right of that parameter, and it will not be
included in the request body. If the parameter is not sent, Toolbox either treats it as a `nil` value or uses the `default` value, if one is configured. If the parameter is required, Toolbox returns an error.

When the box is checked, the parameter is sent exactly as entered in its parameter field (e.g. an empty string).
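
Extending the sketch above, an optional parameter can carry a default value, assuming the `default` field described in the linked basic parameters documentation (names remain hypothetical):

```yaml
tools:
  search_users:
    kind: postgres-sql
    source: my-pg-source
    description: Search users by name, optionally filtering by minimum age.
    statement: SELECT * FROM users WHERE name LIKE $1 AND age >= $2;
    parameters:
      - name: name
        type: string
        description: The name to search for.
      - name: min_age
        type: integer
        description: Minimum age filter.
        default: 18   # used when min_age is excluded from the request
```

Unchecking the `min_age` box in the UI would then fall back to this default instead of sending a value.
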

|
||||
|
||||

|
||||
|
||||
### Editing Headers
|
||||
|
||||
To edit headers, press the "Edit Headers" button to display the header modal. Within this modal,
|
||||
users can make direct edits by typing into the header's text area.
|
||||
|
||||
Toolbox UI validates that the headers are in correct JSON format. Other header-related errors (e.g.,
|
||||
incorrect header names or values required by the tool) will be reported in the Response section
|
||||
after running the tool.
|
||||
|
||||

|
||||
|
||||
#### Google OAuth
|
||||
|
||||
Currently, Toolbox supports Google OAuth 2.0 as an AuthService, which allows tools to utilize
|
||||
authorized parameters. When a tool uses an authorized parameter, the parameter will be displayed
|
||||
but not editable, as it will be populated from the authentication token.
|
||||
|
||||
To provide the token, add your Google OAuth ID Token to the request header using the "Edit Headers"
|
||||
button and modal described above. The key should be the name of your AuthService as defined in
|
||||
your tool configuration file, suffixed with `_token`. The value should be your ID token as a string.
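
For context, the AuthService name is defined in the tools configuration file. A minimal sketch, assuming the `authServices` section with the `google` kind (the name `my-google-auth` is hypothetical and matches the header example below):

```yaml
authServices:
  my-google-auth:
    kind: google
    clientId: ${YOUR_GOOGLE_CLIENT_ID}
```

With this definition, the header key to add in the modal is `my-google-auth_token`.
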
1. Select a tool that requires authenticated parameters
2. The auth parameter's text field is greyed out. This is because it cannot be entered manually and will be parsed from the resolved auth token
3. To update request headers with the token, select "Edit Headers"
4. Check out the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving the ID token
|
||||
5. Paste the request header
|
||||
6. Click "Save"
|
||||
7. Click "Run Tool"
|
||||
|
||||
```json
|
||||
{
|
||||
"Content-Type": "application/json",
|
||||
"my-google-auth_token": "YOUR_ID_TOKEN_HERE"
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
|
||||
## Navigating the Toolsets Page
|
||||
|
||||
Through the toolsets page, users can search for a specific toolset to retrieve tools from. Simply
|
||||
enter the toolset name in the search bar, and press "Enter" to retrieve the associated tools.
|
||||
|
||||
If the toolset name is not defined within the tools configuration file, an error message will be
|
||||
displayed.
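
For reference, toolsets are named groups of tools declared in the same configuration file, for example (hypothetical names):

```yaml
toolsets:
  my_toolset:
    - search_users
    - list_flights
```

Entering `my_toolset` in the search bar would then return `search_users` and `list_flights`.
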

|
||||
BIN
docs/en/how-to/use-toolbox-ui/optional-param-checked.png
Normal file
|
After Width: | Height: | Size: 58 KiB |
BIN
docs/en/how-to/use-toolbox-ui/optional-param-unchecked.png
Normal file
|
After Width: | Height: | Size: 59 KiB |
BIN
docs/en/how-to/use-toolbox-ui/run-tool.gif
Normal file
|
After Width: | Height: | Size: 5.4 MiB |
BIN
docs/en/how-to/use-toolbox-ui/tools.png
Normal file
|
After Width: | Height: | Size: 269 KiB |
BIN
docs/en/how-to/use-toolbox-ui/toolsets.png
Normal file
|
After Width: | Height: | Size: 136 KiB |
@@ -33,6 +33,14 @@ cluster][alloydb-free-trial].
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in AlloyDB Postgres.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
|
||||
Connect your IDE to AlloyDB using Toolbox.
|
||||
|
||||
- [AlloyDB Admin API using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_admin_mcp/)
|
||||
Create your AlloyDB database with MCP Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
@@ -54,6 +54,11 @@ avoiding full table scans or complex filters.
|
||||
- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)
|
||||
List tables in a given dataset.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
|
||||
Connect your IDE to BigQuery using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
@@ -27,6 +27,11 @@ to a database by following these instructions][csql-mssql-connect].
|
||||
- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)
|
||||
Run parameterized SQL Server queries in Cloud SQL for SQL Server.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for SQL Server using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mssql_mcp/)
|
||||
Connect your IDE to Cloud SQL for SQL Server using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
@@ -28,6 +28,11 @@ to a database by following these instructions][csql-mysql-quickstart].
|
||||
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
|
||||
Run parameterized SQL queries in Cloud SQL for MySQL.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
|
||||
Connect your IDE to Cloud SQL for MySQL using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
@@ -28,6 +28,12 @@ to a database by following these instructions][csql-pg-quickstart].
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in PostgreSQL.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
|
||||
Connect your IDE to Cloud SQL for Postgres using Toolbox.
|
||||
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
@@ -35,10 +35,213 @@ You can use the following system prompt as "Custom Instructions" in your client
|
||||
application.
|
||||
|
||||
```
|
||||
Whenever you receive a response from the dataplex_search_entries tool, decide what to do by following these steps:
|
||||
# Objective
|
||||
Your primary objective is to help discover, organize and manage metadata related to data assets.
|
||||
|
||||
# Tone and Style
|
||||
1. Adopt the persona of a senior subject matter expert
|
||||
2. Your communication style must be:
|
||||
1. Concise: Always favor brevity.
|
||||
2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.
|
||||
Example (Incorrect): Hi there! I see that you are looking for...
|
||||
Example (Correct): This problem likely stems from...
|
||||
3. Do not reiterate or summarize the question in the answer.
|
||||
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
|
||||
Example (Correct): "The entry describes..."
|
||||
Example (Correct): "According to catalog,..."
|
||||
Example (Correct): "Based on the metadata,..."
|
||||
Example (Correct): "Based on the search results,..."
|
||||
5. Do not make assumptions
|
||||
|
||||
# Data Model
|
||||
## Entries
|
||||
Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:
|
||||
|
||||
- A BigQuery table or dataset
|
||||
- A Cloud Storage bucket or folder
|
||||
- An on-premises SQL table
|
||||
|
||||
## Aspects
|
||||
While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.
|
||||
|
||||
## Aspect Types
|
||||
Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry.
|
||||
|
||||
Examples:
|
||||
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
|
||||
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub
|
||||
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-model
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-table
|
||||
- projects/dataplex-types/locations/global/aspectTypes/bigquery-view
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
|
||||
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
|
||||
- projects/dataplex-types/locations/global/aspectTypes/contacts
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataform-repository
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
|
||||
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
|
||||
- projects/dataplex-types/locations/global/aspectTypes/data-product
|
||||
- projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
|
||||
- projects/dataplex-types/locations/global/aspectTypes/external-connection
|
||||
- projects/dataplex-types/locations/global/aspectTypes/overview
|
||||
- projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
|
||||
- projects/dataplex-types/locations/global/aspectTypes/schema
|
||||
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
|
||||
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
|
||||
- projects/dataplex-types/locations/global/aspectTypes/sql-access
|
||||
- projects/dataplex-types/locations/global/aspectTypes/storage-bucket
|
||||
- projects/dataplex-types/locations/global/aspectTypes/storage-folder
|
||||
- projects/dataplex-types/locations/global/aspectTypes/storage
|
||||
- projects/dataplex-types/locations/global/aspectTypes/usage
|
||||
|
||||
## Entry Types
|
||||
Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type.
|
||||
|
||||
Examples:
|
||||
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
|
||||
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-connection
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-model
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-routine
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/bigquery-view
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataform-repository
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataform-workspace
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
|
||||
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
|
||||
- projects/dataplex-types/locations/global/entryTypes/pubsub-topic
|
||||
- projects/dataplex-types/locations/global/entryTypes/storage-bucket
|
||||
- projects/dataplex-types/locations/global/entryTypes/storage-folder
|
||||
- projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
|
||||
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
|
||||
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store
|
||||
|
||||
## Entry Groups
|
||||
Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.
|
||||
|
||||
## Entry Links
|
||||
Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).
|
||||
|
||||
# Tool instructions
|
||||
## Tool: dataplex_search_entries
|
||||
## General
|
||||
- Do not try to search within search results on your own.
|
||||
- Do not fetch multiple pages of results unless explicitly asked.
|
||||
|
||||
## Search syntax
|
||||
|
||||
### Simple search
|
||||
In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:
|
||||
|
||||
- A substring of a name, display name, or description of a resource
|
||||
- A substring of the type of a resource
|
||||
- A substring of a column name (or nested column name) in the schema of a resource
|
||||
- A substring of a project ID
|
||||
- A string from an overview description
|
||||
|
||||
For example, the predicate foo matches the following resources:
|
||||
- Resource with the name foo.bar
|
||||
- Resource with the display name Foo Bar
|
||||
- Resource with the description This is the foo script
|
||||
- Resource with the exact type foo
|
||||
- Column foo_bar in the schema of a resource
|
||||
- Nested column foo_bar in the schema of a resource
|
||||
- Project prod-foo-bar
|
||||
- Resource with an overview containing the word foo
|
||||
|
||||
|
||||
### Qualified predicates
|
||||
You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
|
||||
- An equal sign (=) restricts the search to an exact match.
|
||||
- A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.
|
||||
|
||||
Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
|
||||
- name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
|
||||
- description:foo selects resources with the foo token in the description, like bar and foo.
|
||||
- location=foo matches resources in a specified location with foo as the location name.
|
||||
|
||||
The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.
|
||||
|
||||
Search syntax supports the following qualifiers:
|
||||
- "name:x" - Matches x as a substring of the resource ID.
|
||||
- "displayname:x" - Match x as a substring of the resource display name.
|
||||
- "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
|
||||
- "description:x" - Matches x as a token in the resource description.
|
||||
- "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
|
||||
- "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
|
||||
- "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
|
||||
- "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
|
||||
- "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
|
||||
- "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
|
||||
- "type=TYPE" - Matches resources of a specific entry type or its type alias.
|
||||
- "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
|
||||
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. The parent path is a fully_qualified_name of the parent resource.
|
||||
- "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
|
||||
- "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
|
||||
- "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
|
||||
- "createtime" -
|
||||
Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01.
|
||||
- "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.
|
||||
- "fully_qualified_name:x" - Matches x as a substring of fully_qualified_name.
|
||||
- "fully_qualified_name=x" - Matches x as fully_qualified_name.
|
||||
|
||||
### Logical operators
|
||||
A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
|
||||
Logical AND and logical OR are supported. For example, foo OR bar.
|
||||
|
||||
You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
|
||||
Logical operators aren't case-sensitive. For example, both or and OR are acceptable.
|
||||
|
||||
### Request
|
||||
1. Always try to rewrite the prompt using search syntax.
|
||||
|
||||
### Response
|
||||
1. If there are multiple search results found
|
||||
1.1. Present the list of search results
|
||||
1.2. Format the output in nested ordered list, for example:
|
||||
1. Present the list of search results
|
||||
2. Format the output in nested ordered list, for example:
|
||||
Given
|
||||
```
|
||||
{
|
||||
@@ -75,14 +278,19 @@ Whenever you will receive response from dataplex_search_entries tool decide what
|
||||
- location: us-central1
|
||||
- description: Table contains list of best customers.
|
||||
```
|
||||
1.3. Ask to select one of the presented search results
|
||||
3. Ask to select one of the presented search results
|
||||
2. If there is only one search result found
|
||||
2.1. Present the search result immediately.
|
||||
1. Present the search result immediately.
|
||||
3. If there are no search result found
|
||||
3.1. Explain that no search result was found
|
||||
3.2. Suggest to provide a more specific search query.
|
||||
1. Explain that no search result was found
|
||||
2. Suggest to provide a more specific search query.
|
||||
|
||||
Do not try to search within search results on your own.
|
||||
## Tool: dataplex_lookup_entry
|
||||
### Request
|
||||
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the `aspect_types` parameter.
|
||||
2. If you do not know the name of the entry, use `dataplex_search_entries` tool
|
||||
### Response
|
||||
1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
|
||||
```
|
||||
|
||||
## Reference
|
||||
@@ -90,4 +298,4 @@ Do not try to search within search results on your own.
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "dataplex". |
|
||||
| project | string | true | Id of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
|
||||
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
---
|
||||
title: DuckDB
|
||||
linkTitle: DuckDB
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
DuckDB is an in-process SQL OLAP database management system designed for analytical query processing.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[DuckDB](https://duckdb.org/) is an embedded analytical database management system that runs in-process with the client application. It is optimized for analytical workloads, providing high performance for complex queries with minimal setup.
|
||||
|
||||
DuckDB has the following notable characteristics:
|
||||
|
||||
- In-process, serverless database engine
|
||||
- Supports complex SQL queries for analytical processing
|
||||
- Can operate on in-memory or persistent storage
|
||||
- Zero-configuration - no external dependencies or server setup required
|
||||
- Highly optimized for columnar data storage and query execution
|
||||
|
||||
For more details, refer to the [DuckDB Documentation](https://duckdb.org/).
|
||||
|
||||
## Available Tools
|
||||
- [`duckdb-sql`](../tools/duckdb/duckdb-sql.md)
|
||||
Execute pre-defined prepared SQL queries in DuckDB.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database File
|
||||
|
||||
To use DuckDB, you can either:
|
||||
|
||||
- Specify a file path for a persistent database stored on the filesystem
|
||||
- Omit the file path to use an in-memory database
|
||||
|
||||
## Example
|
||||
|
||||
For a persistent DuckDB database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-duckdb:
|
||||
kind: "duckdb"
|
||||
dbFilePath: "/path/to/database.db"
|
||||
configuration:
|
||||
memory_limit: "2GB"
|
||||
threads: "4"
|
||||
```
|
||||
|
||||
For an in-memory DuckDB database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-duckdb-memory:
|
||||
name: "my-duckdb-memory"
|
||||
kind: "duckdb"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------------|:-----------------:|:------------:|---------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "duckdb". |
|
||||
| dbFilePath | string | false | Path to the DuckDB database file. Omit for an in-memory database. |
|
||||
| configuration | map[string]string | false | Additional DuckDB configuration options (e.g., `memory_limit`, `threads`). |
|
||||
|
||||
For a complete list of available configuration options, refer to the [DuckDB Configuration Documentation](https://duckdb.org/docs/stable/configuration/overview.html#local-configuration-options).
|
||||
|
||||
|
||||
For more details on the Go implementation, see the [go-duckdb package documentation](https://pkg.go.dev/github.com/scottlepp/go-duckdb#section-readme).
|
||||
80
docs/en/resources/sources/kuzu.md
Normal file
@@ -0,0 +1,80 @@
|
||||
---
|
||||
title: "Kùzu"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Kùzu is an open-source, embedded graph database built for query speed and scalability, optimized for complex join-heavy analytical workloads using the Cypher query language.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[Kuzu](https://kuzudb.com/) is an embedded graph database designed for high query speed and scalability, optimized for complex, join-heavy analytical workloads on large graph datasets. It provides a lightweight, in-process integration with applications, making it ideal for scenarios requiring fast and efficient graph data processing.
|
||||
|
||||
Kuzu has the following core features:
|
||||
|
||||
- **Property Graph Data Model and Cypher Query Language**: Supports the property graph model and uses Cypher, a powerful and expressive query language for graph databases.
|
||||
- **Embedded Integration**: Runs in-process with applications, eliminating the need for a separate server.
|
||||
- **Columnar Disk-Based Storage**: Utilizes columnar storage for efficient data access and management.
|
||||
- **Columnar and Compressed Sparse Row-Based (CSR) Adjacency List and Join Indices**: Optimizes storage and query performance for large graphs.
|
||||
- **Vectorized and Factorized Query Processing**: Enhances query execution speed through advanced processing techniques.
|
||||
- **Novel and Efficient Join Algorithms**: Improves performance for complex join operations.
|
||||
- **Multi-Core Query Parallelism**: Leverages multiple cores for faster query execution.
|
||||
- **Serializable ACID Transactions**: Ensures data consistency and reliability with full ACID compliance.
|
||||
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`kuzu-cypher`](../tools/kuzu/kuzu-cypher.md)
|
||||
Execute pre-defined Cypher queries with placeholder parameters.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database File
|
||||
|
||||
To use Kuzu, you can either:
|
||||
|
||||
- Specify a file path for a persistent database file stored on the filesystem
|
||||
- Omit the file path to use an in-memory database
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-kuzu-db:
|
||||
kind: "kuzu"
|
||||
database: "/path/to/database.db"
|
||||
bufferPoolSize: 1073741824 # 1GB
|
||||
maxNumThreads: 4
|
||||
enableCompression: true
|
||||
readOnly: false
|
||||
maxDbSize: 5368709120 # 5GB
|
||||
```
|
||||
|
||||
For an in-memory database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-kuzu-memory-db:
|
||||
kind: "kuzu"
|
||||
bufferPoolSize: 1073741824 # 1GB
|
||||
maxNumThreads: 4
|
||||
enableCompression: true
|
||||
readOnly: false
|
||||
maxDbSize: 5368709120 # 5GB
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| **Field** | **Type** | **Required** | **Description** |
|
||||
|--------------------|:--------:|:------------:|---------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "kuzu". |
|
||||
| database | string | false | Path to the database directory. Default is ":memory:" which creates an in-memory database. |
|
||||
| bufferPoolSize | uint64 | false | Size of the buffer pool in bytes (e.g., 1073741824 for 1GB). |
|
||||
| maxNumThreads | uint64 | false | Maximum number of threads for query execution. |
|
||||
| enableCompression | bool | false | Enables or disables data compression. Default is true. |
|
||||
| readOnly | bool | false | Sets the database to read-only mode if true. Default is false. |
|
||||
| maxDbSize | uint64 | false | Maximum database size in bytes (e.g., 5368709120 for 5GB). |
|
||||
|
||||
For a complete list of available configuration options, refer to the [Kuzu SystemConfig options](https://pkg.go.dev/github.com/kuzudb/go-kuzu#SystemConfig).
|
||||
@@ -23,6 +23,11 @@ reputation for reliability, feature robustness, and performance.
|
||||
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
|
||||
Run parameterized SQL statements in PostgreSQL.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
|
||||
Connect your IDE to PostgreSQL using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
|
||||
@@ -31,6 +31,11 @@ the Google Cloud console][spanner-quickstart].
|
||||
- [`spanner-execute-sql`](../tools/spanner/spanner-execute-sql.md)
|
||||
Run structured and parameterized queries on Spanner.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
- [Spanner using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/spanner_mcp/)
|
||||
Connect your IDE to Spanner using Toolbox.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
81
docs/en/resources/sources/tidb.md
Normal file
@@ -0,0 +1,81 @@
|
||||
---
|
||||
title: "TiDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
TiDB is a distributed SQL database that combines the best of traditional RDBMS and NoSQL databases.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[TiDB][tidb-docs] is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL-compatible and features horizontal scalability, strong consistency, and high availability.
|
||||
|
||||
[tidb-docs]: https://docs.pingcap.com/tidb/stable
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
|
||||
This source uses standard MySQL protocol authentication. You will need to [create a TiDB user][tidb-users] to log in to the database with.
|
||||
|
||||
For TiDB Cloud users, you can create database users through the TiDB Cloud console.
|
||||
|
||||
[tidb-users]: https://docs.pingcap.com/tidb/stable/user-account-management
|
||||
|
||||
## SSL Configuration
|
||||
|
||||
- TiDB Cloud
|
||||
|
||||
For TiDB Cloud instances, SSL is automatically enabled when the hostname matches the TiDB Cloud pattern (`gateway*.*.*.tidbcloud.com`). You don't need to explicitly set `ssl: true` for TiDB Cloud connections.
|
||||
|
||||
- Self-Hosted TiDB
|
||||
|
||||
For self-hosted TiDB instances, you can optionally enable SSL by setting `ssl: true` in your configuration.
|
||||
|
||||
## Example
|
||||
|
||||
- TiDB Cloud
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-tidb-cloud-source:
|
||||
kind: tidb
|
||||
host: gateway01.us-west-2.prod.aws.tidbcloud.com
|
||||
port: 4000
|
||||
database: my_db
|
||||
user: ${TIDB_USERNAME}
|
||||
password: ${TIDB_PASSWORD}
|
||||
# SSL is automatically enabled for TiDB Cloud
|
||||
```
|
||||
|
||||
- Self-Hosted TiDB
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-tidb-source:
|
||||
kind: tidb
|
||||
host: 127.0.0.1
|
||||
port: 4000
|
||||
database: my_db
|
||||
user: ${TIDB_USERNAME}
|
||||
password: ${TIDB_PASSWORD}
|
||||
# ssl: true # Optional: enable SSL for secure connections
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "tidb". |
|
||||
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "gateway01.*.tidbcloud.com"). |
|
||||
| port | string | true | Port to connect to (typically "4000" for TiDB). |
|
||||
| database | string | true | Name of the TiDB database to connect to (e.g. "my_db"). |
|
||||
| user | string | true | Name of the TiDB user to connect as (e.g. "my-tidb-user"). |
|
||||
| password | string | true | Password of the TiDB user (e.g. "my-password"). |
|
||||
| ssl | boolean | false | Whether to use SSL/TLS encryption. Automatically enabled for TiDB Cloud instances. |
|
||||
@@ -75,6 +75,12 @@ visible to the LLM.
|
||||
|
||||
[alloydb-psv]: https://cloud.google.com/alloydb/docs/parameterized-secure-views-overview
|
||||
|
||||
{{< notice tip >}} Make sure to enable the `parameterized_views` extension before running this tool. You can do so by running this command in the AlloyDB studio:
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS parameterized_views;
|
||||
```
|
||||
{{< /notice >}}
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
@@ -95,7 +101,6 @@ tools:
|
||||
- name: my_google_service
|
||||
field: email
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|
||||
@@ -15,8 +15,9 @@ It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-execute-sql` takes one input parameter `sql` and runs the sql
|
||||
statement against the `source`.
|
||||
`bigquery-execute-sql` takes a required `sql` input parameter and runs the SQL
|
||||
statement against the configured `source`. It also supports an optional `dry_run`
|
||||
parameter to validate a query without executing it.
|
||||
|
||||
## Example
|
||||
|
||||
|
||||
60
docs/en/resources/tools/dataplex/dataplex-lookup-entry.md
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
title: "dataplex-lookup-entry"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "dataplex-lookup-entry" tool returns details of a particular entry in Dataplex Catalog.
|
||||
aliases:
|
||||
- /resources/tools/dataplex-lookup-entry
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `dataplex-lookup-entry` tool returns details of a particular entry in Dataplex Catalog.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [dataplex](../sources/dataplex.md)
|
||||
|
||||
`dataplex-lookup-entry` takes a required `name` parameter, which contains the project and location to which the request should be attributed, in the form `projects/{project}/locations/{location}`, and a required `entry` parameter, which is the resource name of the entry, in the form `projects/{project}/locations/{location}/entryGroups/{entryGroup}/entries/{entry}`. It also optionally accepts the following parameters:
- `view` - Controls which parts of an entry the service should return. It takes integer values from 1-4, corresponding to the view type: BASIC, FULL, CUSTOM, ALL.
- `aspectTypes` - Limits the aspects returned to the provided aspect types in the format `projects/{project}/locations/{location}/aspectTypes/{aspectType}`. It only works for the CUSTOM view.
- `paths` - Limits the aspects returned to those associated with the provided paths within the Entry. It only works for the CUSTOM view.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control
|
||||
user and group access to Dataplex resources. Toolbox will use your
|
||||
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
|
||||
interacting with [Dataplex][dataplex-docs].
|
||||
|
||||
In addition to [setting the ADC for your server][set-adc], you need to ensure
|
||||
the IAM identity has been given the correct IAM permissions for the tasks you
|
||||
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
|
||||
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
|
||||
applying IAM permissions and roles to an identity.
|
||||
|
||||
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
|
||||
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
lookup_entry:
|
||||
kind: dataplex-lookup-entry
|
||||
source: my-dataplex-source
|
||||
description: Use this tool to retrieve a specific entry in Dataplex Catalog.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "dataplex-lookup-entry". |
|
||||
| source | string | true | Name of the source the tool should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
title: "DuckDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with DuckDB Sources.
|
||||
---
|
||||
@@ -1,80 +0,0 @@
|
||||
---
|
||||
title: "duckdb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute SQL statements against a DuckDB database using the DuckDB SQL tools configuration.
|
||||
aliases:
|
||||
- /resources/tools/duckdb-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `duckdb-sql` tool executes a pre-defined SQL statement against a [DuckDB](https://duckdb.org/) database. It is compatible with any DuckDB source configuration as defined in the [DuckDB source documentation](../../sources/duckdb.md).
|
||||
|
||||
The specified SQL statement is executed as a prepared statement, and parameters are inserted according to their position: e.g., `$1` is the first parameter, `$2` is the second, and so on. If template parameters are included, they are resolved before execution of the prepared statement.
|
||||
|
||||
DuckDB's SQL dialect closely follows the conventions of the PostgreSQL dialect, with a few exceptions listed in the [DuckDB PostgreSQL Compatibility documentation](https://duckdb.org/docs/stable/sql/dialect/postgresql_compatibility.html). For an introduction to DuckDB's SQL dialect, refer to the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
|
||||
|
||||
### Concepts
|
||||
|
||||
DuckDB is a relational database management system (RDBMS). Data is stored in relations (tables), where each table is a named collection of rows. Each row in a table has the same set of named columns, each with a specific data type. Tables are stored within schemas, and a collection of schemas constitutes the entire database.
|
||||
|
||||
For more details, see the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections. Query parameters can be used as substitutes for arbitrary expressions but cannot be used for identifiers, column names, table names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search-users:
|
||||
kind: duckdb-sql
|
||||
source: my-duckdb
|
||||
description: Search users by name and age
|
||||
statement: SELECT * FROM users WHERE name LIKE $1 AND age >= $2
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name to search for
|
||||
- name: min_age
|
||||
type: integer
|
||||
description: Minimum age
|
||||
```
|
||||
|
||||
## Example with Template Parameters
|
||||
|
||||
> **Note:** Template parameters allow direct modifications to the SQL statement, including identifiers, column names, and table names, which makes them more vulnerable to SQL injections. Using basic parameters (see above) is recommended for performance and safety. For more details, see the [templateParameters](../#template-parameters) section.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: duckdb-sql
|
||||
source: my-duckdb
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:-------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "duckdb-sql". |
|
||||
| source | string | true | Name of the DuckDB source configuration (see [DuckDB source documentation](../../sources/duckdb.md)). |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The SQL statement to execute. |
|
||||
| authRequired | []string | false | List of authentication requirements for the tool (if any). |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of parameters that will be inserted into the SQL statement |
|
||||
| templateParameters | [templateParameters](../#template-parameters) | false | List of template parameters that will be inserted into the SQL statement before executing the prepared statement. |
|
||||
7
docs/en/resources/tools/kuzu/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Kuzu"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Kuzu Sources.
|
||||
---
|
||||
110
docs/en/resources/tools/kuzu/kuzu-cypher.md
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
title: "kuzu-cypher"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "kuzu-cypher" tool executes a pre-defined cypher statement against a Kuzu
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/kuzu-cypher
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `kuzu-cypher` tool executes a pre-defined Cypher statement against a Kuzu graph database. It is designed to work with Kuzu's embedded graph database, optimized for high query speed and scalability. The tool is compatible with the following sources:
|
||||
|
||||
- [kuzu](../../sources/kuzu.md)
|
||||
|
||||
The specified Cypher statement is executed as a [parameterized statement][kuzu-parameters], with parameters referenced by their name (e.g., `$id`). This approach ensures security by preventing Cypher injection attacks.
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent Cypher injections. \
|
||||
> Query parameters can be used as substitutes for arbitrary expressions but cannot replace identifiers, node labels, relationship types, or other structural parts of the query.
|
||||
|
||||
[kuzu-parameters]:
|
||||
https://docs.kuzudb.com/get-started/prepared-statements/
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
find_collaborators:
|
||||
kind: kuzu-cypher
|
||||
source: my-kuzu-social-network
|
||||
statement: |
|
||||
MATCH (p1:Person)-[:Collaborated_With]->(p2:Person)
|
||||
WHERE p1.name = $name AND p2.age > $min_age
|
||||
RETURN p2.name, p2.age
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to find collaborators for a specific person in a social network, filtered by a minimum age.
|
||||
Takes a full person name (e.g., "Alice Smith") and a minimum age (e.g., 25) and returns a list of collaborator names and their ages.
|
||||
Do NOT use this tool with incomplete names or arbitrary values. Do NOT guess a name or age.
|
||||
A person name is a fully qualified name with first and last name separated by a space.
|
||||
For example, if given "Smith, Alice" the person name is "Alice Smith".
|
||||
If multiple results are returned, prioritize those with the closest collaboration ties.
|
||||
Example:
|
||||
{{
|
||||
"name": "Bob Johnson",
|
||||
"min_age": 30
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"name": "Emma Davis",
|
||||
"min_age": 25
|
||||
}}
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: Full person name, "firstname lastname"
|
||||
- name: min_age
|
||||
type: integer
|
||||
description: Minimum age as a positive integer
|
||||
```
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the Cypher statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to Cypher injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](../#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
find_friends:
|
||||
kind: kuzu-cypher
|
||||
source: my-kuzu-social-network
|
||||
statement: |
|
||||
MATCH (p1:{{.nodeLabel}})-[:friends_with]->(p2:{{.nodeLabel}})
|
||||
WHERE p1.name = $name
|
||||
RETURN p2.name
|
||||
LIMIT 5
|
||||
description: |
|
||||
Use this tool to find friends of a specific person in a social network.
|
||||
Takes a node label (e.g., "Person") and a full person name (e.g., "Alice Smith") and returns a list of friend names.
|
||||
Do NOT use with incomplete names. A person name is a full name with first and last name separated by a space.
|
||||
Example:
|
||||
{
|
||||
"nodeLabel": "Person",
|
||||
"name": "Bob Johnson"
|
||||
}
|
||||
templateParameters:
|
||||
- name: nodeLabel
|
||||
type: string
|
||||
description: Node label for the table to query, e.g., "Person"
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: Full person name, "firstname lastname"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **Field** | **Type** | **Required** | **Description** |
|
||||
|----------------------|:-------------------------------------:|:------------:|---------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "kuzu-cypher". |
|
||||
| source | string | true | Name of the Kuzu source the Cypher query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM for context. |
|
||||
| statement | string | true | Cypher statement to execute. |
|
||||
| authRequired | []string | false | List of authentication requirements for executing the query (if applicable). |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of parameters used with the Cypher statement. |
|
||||
| templateParameters | [templateParameters](../#template-parameters) | false | List of [templateParameters](../#template-parameters) that will be inserted into the Cypher statement before executing prepared statement. |
|
||||
7
docs/en/resources/tools/tidb/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "TiDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with TiDB Sources, such as TiDB Cloud and self-hosted TiDB.
|
||||
---
|
||||
41
docs/en/resources/tools/tidb/tidb-execute-sql.md
Normal file
@@ -0,0 +1,41 @@
|
||||
---
|
||||
title: "tidb-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "tidb-execute-sql" tool executes a SQL statement against a TiDB
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/tidb-execute-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `tidb-execute-sql` tool executes a SQL statement against a TiDB
|
||||
database. It's compatible with the following source:
|
||||
|
||||
- [tidb](../sources/tidb.md)
|
||||
|
||||
`tidb-execute-sql` takes one input parameter `sql` and runs the SQL
statement against the `source`.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with
|
||||
> human-in-the-loop and shouldn't be used for production agents.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
execute_sql_tool:
|
||||
kind: tidb-execute-sql
|
||||
source: my-tidb-instance
|
||||
description: Use this tool to execute a SQL statement.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "tidb-execute-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
105
docs/en/resources/tools/tidb/tidb-sql.md
Normal file
@@ -0,0 +1,105 @@
|
||||
---
|
||||
title: "tidb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "tidb-sql" tool executes a pre-defined SQL statement against a TiDB
|
||||
database.
|
||||
aliases:
|
||||
- /resources/tools/tidb-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `tidb-sql` tool executes a pre-defined SQL statement against a TiDB
|
||||
database. It's compatible with the following source:
|
||||
|
||||
- [tidb](../sources/tidb.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][tidb-prepare],
|
||||
and expects parameters in the SQL query to be in the form of placeholders `?`.
|
||||
|
||||
[tidb-prepare]: https://docs.pingcap.com/tidb/stable/sql-prepared-plan-cache
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections.
|
||||
> Query parameters can be used as substitutes for arbitrary expressions.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search_flights_by_number:
|
||||
kind: tidb-sql
|
||||
source: my-tidb-instance
|
||||
statement: |
|
||||
SELECT * FROM flights
|
||||
WHERE airline = ?
|
||||
AND flight_number = ?
|
||||
LIMIT 10
|
||||
description: |
|
||||
Use this tool to get information for a specific flight.
|
||||
Takes an airline code and flight number and returns info on the flight.
|
||||
Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
|
||||
An airline code for a flight consists of a two-character airline designator followed by the flight number, which is a 1 to 4 digit number.
|
||||
For example, if given CY 0123, the airline is "CY", and flight_number is "123".
|
||||
Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
|
||||
If the tool returns more than one option, choose the date closest to today.
|
||||
Example:
|
||||
{{
|
||||
"airline": "CY",
|
||||
"flight_number": "888",
|
||||
}}
|
||||
Example:
|
||||
{{
|
||||
"airline": "DL",
|
||||
"flight_number": "1234",
|
||||
}}
|
||||
parameters:
|
||||
- name: airline
|
||||
type: string
|
||||
description: Airline unique 2 letter identifier
|
||||
- name: flight_number
|
||||
type: string
|
||||
description: 1 to 4 digit number
|
||||
```
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
> **Note:** This tool allows direct modifications to the SQL statement,
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: tidb-sql
|
||||
source: my-tidb-instance
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:------------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "tidb-sql". |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
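
As a usage illustration that is not part of the documentation file above: once a Toolbox server is running with the `search_flights_by_number` tool loaded, it can be invoked over HTTP. The `/api/tool/<name>/invoke` route and port 5000 are assumptions about a default local deployment, so adjust them to match yours.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Parameter names come straight from the tool definition above.
	payload := bytes.NewBufferString(`{"airline": "CY", "flight_number": "888"}`)

	// Assumed default Toolbox address and invocation route; adjust as needed.
	resp, err := http.Post(
		"http://127.0.0.1:5000/api/tool/search_flights_by_number/invoke",
		"application/json",
		payload,
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body))
}
```
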
@@ -114,7 +114,7 @@ In this section, we will download and install the Toolbox binary.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
export VERSION="0.10.0"
export VERSION="0.11.0"
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -220,7 +220,7 @@
},
"outputs": [],
"source": [
"version = \"0.10.0\" # x-release-please-version\n",
"version = \"0.11.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.11.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

27
go.mod
@@ -11,7 +11,7 @@ require (
	cloud.google.com/go/cloudsqlconn v1.17.3
	cloud.google.com/go/dataplex v1.26.0
	cloud.google.com/go/firestore v1.18.0
	cloud.google.com/go/spanner v1.83.0
	cloud.google.com/go/spanner v1.84.0
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0
	github.com/couchbase/gocb/v2 v2.10.1
@@ -20,6 +20,7 @@ require (
	github.com/go-chi/chi/v5 v5.2.2
	github.com/go-chi/httplog/v2 v2.1.1
	github.com/go-chi/render v1.0.3
	github.com/go-goquery/goquery v1.0.1
	github.com/go-playground/validator/v10 v10.27.0
	github.com/go-sql-driver/mysql v1.9.3
	github.com/goccy/go-yaml v1.18.0
@@ -27,10 +28,11 @@ require (
	github.com/google/uuid v1.6.0
	github.com/jackc/pgx/v5 v5.7.5
	github.com/json-iterator/go v1.1.12
	github.com/kuzudb/go-kuzu v0.11.0
	github.com/looker-open-source/sdk-codegen/go v0.25.10
	github.com/microsoft/go-mssqldb v1.9.2
	github.com/neo4j/neo4j-go-driver/v5 v5.28.1
	github.com/redis/go-redis/v9 v9.11.0
	github.com/redis/go-redis/v9 v9.12.0
	github.com/spf13/cobra v1.9.1
	github.com/thlib/go-timezone-local v0.0.7
	github.com/valkey-io/valkey-go v1.0.63
@@ -44,25 +46,19 @@ require (
	go.opentelemetry.io/otel/sdk/metric v1.37.0
	go.opentelemetry.io/otel/trace v1.37.0
	golang.org/x/oauth2 v0.30.0
	google.golang.org/api v0.244.0
	google.golang.org/api v0.245.0
	modernc.org/sqlite v1.38.2
)

require (
	github.com/duckdb/duckdb-go-bindings v0.1.17 // indirect
	github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12 // indirect
	github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12 // indirect
	github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12 // indirect
	github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12 // indirect
	github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12 // indirect
	github.com/marcboeker/go-duckdb/arrowmapping v0.0.10 // indirect
	github.com/marcboeker/go-duckdb/mapping v0.0.11 // indirect
	github.com/andybalholm/cascadia v1.3.3 // indirect
	golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
	gonum.org/v1/gonum v0.16.0 // indirect
)

require (
	cel.dev/expr v0.24.0 // indirect
	cloud.google.com/go v0.121.2 // indirect
	cloud.google.com/go v0.121.4 // indirect
	cloud.google.com/go/alloydb v1.18.0 // indirect
	cloud.google.com/go/auth v0.16.3 // indirect
	cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
@@ -75,8 +71,8 @@ require (
	github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect
	github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
	github.com/PuerkitoBio/goquery v1.10.3 // indirect
	github.com/ajg/form v1.5.1 // indirect
	github.com/apache/arrow-go/v18 v18.4.0 // indirect
	github.com/apache/arrow/go/v15 v15.0.2 // indirect
	github.com/cenkalti/backoff/v5 v5.0.2 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
@@ -97,7 +93,6 @@ require (
	github.com/go-logr/stdr v1.2.2 // indirect
	github.com/go-playground/locales v0.14.1 // indirect
	github.com/go-playground/universal-translator v0.18.1 // indirect
	github.com/go-viper/mapstructure/v2 v2.3.0 // indirect
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
	github.com/golang-sql/sqlexp v0.1.0 // indirect
@@ -117,7 +112,6 @@ require (
	github.com/klauspost/compress v1.18.0 // indirect
	github.com/klauspost/cpuid/v2 v2.2.11 // indirect
	github.com/leodido/go-urn v1.4.0 // indirect
	github.com/marcboeker/go-duckdb/v2 v2.3.4
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.2 // indirect
@@ -126,6 +120,7 @@ require (
	github.com/pierrec/lz4/v4 v4.1.22 // indirect
	github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
	github.com/shopspring/decimal v1.4.0 // indirect
	github.com/spf13/pflag v1.0.6 // indirect
	github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
@@ -157,7 +152,7 @@ require (
	golang.org/x/tools v0.34.0 // indirect
	golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
	google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect
	google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
	google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 // indirect
	google.golang.org/grpc v1.74.2 // indirect
	google.golang.org/protobuf v1.36.6 // indirect

97
go.sum
@@ -38,8 +38,8 @@ cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRY
|
||||
cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM=
|
||||
cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I=
|
||||
cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY=
|
||||
cloud.google.com/go v0.121.2 h1:v2qQpN6Dx9x2NmwrqlesOt3Ys4ol5/lFZ6Mg1B7OJCg=
|
||||
cloud.google.com/go v0.121.2/go.mod h1:nRFlrHq39MNVWu+zESP2PosMWA0ryJw8KUBZ2iZpxbw=
|
||||
cloud.google.com/go v0.121.4 h1:cVvUiY0sX0xwyxPwdSU2KsF9knOVmtRyAMt8xou0iTs=
|
||||
cloud.google.com/go v0.121.4/go.mod h1:XEBchUiHFJbz4lKBZwYBDHV/rSyfFktk737TLDU089s=
|
||||
cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4=
|
||||
cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw=
|
||||
cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E=
|
||||
@@ -544,8 +544,8 @@ cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+
|
||||
cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos=
|
||||
cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk=
|
||||
cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M=
|
||||
cloud.google.com/go/spanner v1.83.0 h1:AH3QIoSIa01l3WbeTppkwCEYFNK1AER6drcYhPmwhxY=
|
||||
cloud.google.com/go/spanner v1.83.0/go.mod h1:QSWcjxszT0WRHNd8zyGI0WctrYA1N7j0yTFsWyol9Yw=
|
||||
cloud.google.com/go/spanner v1.84.0 h1:McPTc6g7hmZfvBlY2mwxl4nJ3+oVtDOl3yGvxIyDTKk=
|
||||
cloud.google.com/go/spanner v1.84.0/go.mod h1:Klo9oQcfxr32vh+2iuA34c6rHjjSpMcU23Jy6DArz7I=
|
||||
cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM=
|
||||
cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ=
|
||||
cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0=
|
||||
@@ -563,8 +563,8 @@ cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeL
|
||||
cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s=
|
||||
cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y=
|
||||
cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4=
|
||||
cloud.google.com/go/storage v1.53.0 h1:gg0ERZwL17pJ+Cz3cD2qS60w1WMDnwcm5YPAIQBHUAw=
|
||||
cloud.google.com/go/storage v1.53.0/go.mod h1:7/eO2a/srr9ImZW9k5uufcNahT2+fPb8w5it1i5boaA=
|
||||
cloud.google.com/go/storage v1.55.0 h1:NESjdAToN9u1tmhVqhXCaCwYBuvEhZLLv0gBr+2znf0=
|
||||
cloud.google.com/go/storage v1.55.0/go.mod h1:ztSmTTwzsdXe5syLVS0YsbFxXuvEmEyZj7v7zChEmuY=
|
||||
cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w=
|
||||
cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I=
|
||||
cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4=
|
||||
@@ -665,6 +665,8 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapp
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
|
||||
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
|
||||
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
|
||||
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
|
||||
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||
github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY=
|
||||
@@ -672,18 +674,14 @@ github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T
|
||||
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
|
||||
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM=
|
||||
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0=
|
||||
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14=
|
||||
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
|
||||
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
|
||||
github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE=
|
||||
github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA=
|
||||
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
|
||||
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
@@ -745,18 +743,6 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8Yc
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
|
||||
github.com/duckdb/duckdb-go-bindings v0.1.17 h1:SjpRwrJ7v0vqnIvLeVFHlhuS72+Lp8xxQ5jIER2LZP4=
|
||||
github.com/duckdb/duckdb-go-bindings v0.1.17/go.mod h1:pBnfviMzANT/9hi4bg+zW4ykRZZPCXlVuvBWEcZofkc=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12 h1:8CLBnsq9YDhi2Gmt3sjSUeXxMzyMQAKefjqUy9zVPFk=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12/go.mod h1:Ezo7IbAfB8NP7CqPIN8XEHKUg5xdRRQhcPPlCXImXYA=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12 h1:wjO4I0GhMh2xIpiUgRpzuyOT4KxXLoUS/rjU7UUVvCE=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12/go.mod h1:eS7m/mLnPQgVF4za1+xTyorKRBuK0/BA44Oy6DgrGXI=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12 h1:HzKQi2C+1jzmwANsPuYH6x9Sfw62SQTjNAEq3OySKFI=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12/go.mod h1:1GOuk1PixiESxLaCGFhag+oFi7aP+9W8byymRAvunBk=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12 h1:YGSR7AFLw2gJ7IbgLE6DkKYmgKv1LaRSd/ZKF1yh2oE=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12/go.mod h1:o7crKMpT2eOIi5/FY6HPqaXcvieeLSqdXXaXbruGX7w=
|
||||
github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12 h1:2aduW6fnFnT2Q45PlIgHbatsPOxV9WSZ5B2HzFfxaxA=
|
||||
github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12/go.mod h1:IlOhJdVKUJCAPj3QsDszUo8DVdvp1nBFp4TUJVdw99s=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
@@ -806,6 +792,8 @@ github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmn
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-goquery/goquery v1.0.1 h1:kpchVA1LdOFWdRpkDPESVdlb1JQI6ixsJ5MiNUITO7U=
|
||||
github.com/go-goquery/goquery v1.0.1/go.mod h1:W5s8OWbqWf6lG0LkXWBeh7U1Y/X5XTI0Br65MHF8uJk=
|
||||
github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE=
|
||||
github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
|
||||
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
|
||||
@@ -830,8 +818,6 @@ github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAu
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
|
||||
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
@@ -910,6 +896,7 @@ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
||||
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
|
||||
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
@@ -1014,7 +1001,6 @@ github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
@@ -1030,6 +1016,8 @@ github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NB
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kuzudb/go-kuzu v0.11.0 h1:7nH5zabXH+IBZruyyML6YIi4tayqg3diwbXmXmnZE8k=
|
||||
github.com/kuzudb/go-kuzu v0.11.0/go.mod h1:s2NvXX3fB2QZfWGf6SjJSYawgTPE17a7WHZmzfLIZtU=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
@@ -1039,12 +1027,6 @@ github.com/looker-open-source/sdk-codegen/go v0.25.10/go.mod h1:YM/IYSsTPk7I54j4
|
||||
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
|
||||
github.com/marcboeker/go-duckdb/arrowmapping v0.0.10 h1:G1W+GVnUefR8uy7jHdNO+CRMsmFG5mFPIHVAespfFCA=
|
||||
github.com/marcboeker/go-duckdb/arrowmapping v0.0.10/go.mod h1:jccUb8TYD0p5TsEEeN4SXuslNJHo23QaKOqKD+U6uFU=
|
||||
github.com/marcboeker/go-duckdb/mapping v0.0.11 h1:fusN1b1l7Myxafifp596I6dNLNhN5Uv/rw31qAqBwqw=
|
||||
github.com/marcboeker/go-duckdb/mapping v0.0.11/go.mod h1:aYBjFLgfKO0aJIbDtXPiaL5/avRQISveX/j9tMf9JhU=
|
||||
github.com/marcboeker/go-duckdb/v2 v2.3.4 h1:o98wrefPbH0IdJRix4pF0+jZiXoFQ+FSR8InMsCUZD0=
|
||||
github.com/marcboeker/go-duckdb/v2 v2.3.4/go.mod h1:8adNrftF4Ye29XMrpIl5NYNosTVsZu1mz3C82WdVvrk=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
@@ -1052,9 +1034,7 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
|
||||
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/microsoft/go-mssqldb v1.9.2 h1:nY8TmFMQOHpm2qVWo6y4I2mAmVdZqlGiMGAYt64Ibbs=
|
||||
github.com/microsoft/go-mssqldb v1.9.2/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
@@ -1091,8 +1071,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
|
||||
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
|
||||
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/redis/go-redis/v9 v9.12.0 h1:XlVPGlflh4nxfhsNXPA8Qp6EmEfTo0rp8oaBzPipXnU=
|
||||
github.com/redis/go-redis/v9 v9.12.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
@@ -1103,6 +1083,8 @@ github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/f
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
|
||||
github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
|
||||
@@ -1226,6 +1208,10 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
|
||||
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
|
||||
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
|
||||
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
@@ -1287,6 +1273,9 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91
|
||||
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
|
||||
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -1346,6 +1335,11 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
|
||||
golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@@ -1395,6 +1389,10 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -1477,8 +1475,14 @@ golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
@@ -1487,6 +1491,11 @@ golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
|
||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
@@ -1503,6 +1512,10 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
|
||||
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@@ -1577,6 +1590,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
|
||||
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -1656,8 +1671,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/
|
||||
google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI=
|
||||
google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0=
|
||||
google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg=
|
||||
google.golang.org/api v0.244.0 h1:lpkP8wVibSKr++NCD36XzTk/IzeKJ3klj7vbj+XU5pE=
|
||||
google.golang.org/api v0.244.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8=
|
||||
google.golang.org/api v0.245.0 h1:YliGvz1rjXB+sTLNIST6Ffeji9WlRdLQ+LPl9ruSa5Y=
|
||||
google.golang.org/api v0.245.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
@@ -1800,8 +1815,8 @@ google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOl
|
||||
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
|
||||
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4=
|
||||
google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0 h1:0UOBWO4dC+e51ui0NFKSPbkHHiQ4TmrEfEZMLDyRmY8=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0/go.mod h1:8ytArBbtOy2xfht+y2fqKd5DRDJRUQhqbyEnQ4bDChs=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
|
||||
@@ -72,26 +72,36 @@ tools:
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
CASE
|
||||
WHEN $2 = 'simple' THEN
|
||||
-- IF format is 'simple', return basic JSON
|
||||
json_build_object('name', ti.table_name)
|
||||
ELSE
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
)
|
||||
END AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
    parameters:
      - name: table_names
        type: string
        description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
      - name: output_format
        type: string
        description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
        default: "detailed"

toolsets:
  cloud-sql-postgres-database-tools:

@@ -7,9 +7,13 @@ tools:
  dataplex_search_entries:
    kind: dataplex-search-entries
    source: dataplex-source
    description: |
      Use this tool to search for entries in Dataplex Catalog that represent data assets (e.g. tables, views, models) based on the provided search query.
    description: Use this tool to search for entries in Dataplex Catalog based on the provided search query.
  dataplex_lookup_entry:
    kind: dataplex-lookup-entry
    source: dataplex-source
    description: Use this tool to retrieve a specific entry from Dataplex Catalog.

toolsets:
  dataplex-tools:
    - dataplex_search_entries
    - dataplex_search_entries
    - dataplex_lookup_entry
@@ -55,6 +55,8 @@ type ServerConfig struct {
	Stdio bool
	// DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
	DisableReload bool
	// UI indicates if Toolbox UI endpoints (/ui) are available
	UI bool
}

type logFormat string

@@ -214,7 +214,7 @@ func (s *stdioSession) write(ctx context.Context, response any) error {
func mcpRouter(s *Server) (chi.Router, error) {
	r := chi.NewRouter()

	r.Use(middleware.AllowContentType("application/json"))
	r.Use(middleware.AllowContentType("application/json", "application/json-rpc", "application/jsonrequest"))
	r.Use(middleware.StripSlashes)
	r.Use(render.SetContentType(render.ContentTypeJSON))
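
For context on the widened allow-list above: MCP clients that post with `application/json-rpc` or `application/jsonrequest` are no longer rejected with 415. A small, self-contained sketch (illustrative only, not project code) of how chi's `AllowContentType` middleware behaves with the broadened list:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/chi/v5/middleware"
)

func main() {
	r := chi.NewRouter()
	// Same middleware call as the MCP router above.
	r.Use(middleware.AllowContentType("application/json", "application/json-rpc", "application/jsonrequest"))
	r.Post("/", func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) })

	for _, ct := range []string{"application/json-rpc", "text/plain"} {
		req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(`{}`))
		req.Header.Set("Content-Type", ct)
		rec := httptest.NewRecorder()
		r.ServeHTTP(rec, req)
		fmt.Println(ct, "->", rec.Code) // application/json-rpc -> 200, text/plain -> 415
	}
}
```
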
@@ -330,6 +330,13 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
		return nil, err
	}
	r.Mount("/mcp", mcpR)
	if cfg.UI {
		webR, err := webRouter()
		if err != nil {
			return nil, err
		}
		r.Mount("/ui", webR)
	}
	// default endpoint for validating server is running
	r.Get("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("🧰 Hello, World! 🧰"))
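
`webRouter` itself is not shown in this diff. Given the static assets added below (index.html, css, and js under internal/server/static), one plausible shape for it is sketched here; this is a hypothetical reconstruction, not the actual implementation.

```go
package server

import (
	"embed"
	"io/fs"
	"net/http"

	"github.com/go-chi/chi/v5"
)

// Hypothetical: embed the static UI assets shipped with the server binary.
//go:embed static
var staticFS embed.FS

func webRouter() (chi.Router, error) {
	r := chi.NewRouter()
	sub, err := fs.Sub(staticFS, "static")
	if err != nil {
		return nil, err
	}
	// Serve index.html, css/, js/ and assets/ beneath the /ui mount point.
	r.Handle("/*", http.StripPrefix("/ui", http.FileServer(http.FS(sub))))
	return r, nil
}
```
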
BIN
internal/server/static/assets/mcptoolboxlogo.png
Normal file
683
internal/server/static/css/style.css
Normal file
@@ -0,0 +1,683 @@
|
||||
:root {
|
||||
--toolbox-blue: #4285f4;
|
||||
--text-primary-gray: #444444;
|
||||
--text-secondary-gray: #6e6e6e;
|
||||
--button-primary: var(--toolbox-blue);
|
||||
--button-secondary: #616161;
|
||||
--section-border: #e0e0e0;
|
||||
}
|
||||
|
||||
body {
|
||||
display: flex;
|
||||
height: 100vh;
|
||||
margin: 0;
|
||||
font-family: 'Trebuchet MS';
|
||||
background-color: #f8f9fa;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
*, *:before, *:after {
|
||||
box-sizing: inherit;
|
||||
}
|
||||
|
||||
#navbar-container {
|
||||
flex: 0 0 250px;
|
||||
height: 100%;
|
||||
position: relative;
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
#main-content-container {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-width: 0;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.left-nav {
|
||||
background-color: #fff;
|
||||
box-shadow: 4px 0px 12px rgba(0, 0, 0, 0.15);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 15px;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 3;
|
||||
|
||||
ul {
|
||||
font-family: 'Verdana';
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
|
||||
li {
|
||||
margin-bottom: 5px;
|
||||
|
||||
a {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 12px;
|
||||
text-decoration: none;
|
||||
color: #333;
|
||||
border-radius: 0;
|
||||
|
||||
&:hover {
|
||||
background-color: #e9e9e9;
|
||||
border-radius: 35px;
|
||||
}
|
||||
|
||||
&.active {
|
||||
background-color: #d0d0d0;
|
||||
font-weight: bold;
|
||||
border-radius: 35px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.second-nav {
|
||||
flex: 0 0 250px;
|
||||
background-color: #fff;
|
||||
box-shadow: 4px 0px 12px rgba(0, 0, 0, 0.15);
|
||||
z-index: 2;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 15px;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.nav-logo {
|
||||
width: 90%;
|
||||
margin-bottom: 40px;
|
||||
flex-shrink: 0;
|
||||
|
||||
img {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.main-content-area {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-width: 0;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.top-bar {
|
||||
background-color: #fff;
|
||||
padding: 30px 30px;
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
.content {
|
||||
padding: 20px;
|
||||
flex-grow: 1;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 10px 20px;
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 30px;
|
||||
font: inherit;
|
||||
font-size: 1em;
|
||||
font-weight: bolder;
|
||||
cursor: pointer;
|
||||
|
||||
&:hover {
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
.btn--run {
|
||||
background-color: var(--button-primary);
|
||||
}
|
||||
|
||||
.btn--editHeaders {
|
||||
background-color: var(--button-secondary)
|
||||
}
|
||||
|
||||
.btn--saveHeaders {
|
||||
background-color: var(--button-primary)
|
||||
}
|
||||
|
||||
.btn--closeHeaders {
|
||||
background-color: var(--button-secondary)
|
||||
}
|
||||
|
||||
.btn--setup-gis {
|
||||
background-color: white;
|
||||
color: var(--text-primary-gray);
|
||||
border: 2px solid var(--text-primary-gray);
|
||||
}
|
||||
|
||||
.tool-button {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 12px;
|
||||
text-decoration: none;
|
||||
color: #333;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
cursor: pointer;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
|
||||
transition: background-color 0.1s ease-in-out, border-radius 0.1s ease-in-out;
|
||||
|
||||
&:hover {
|
||||
background-color: #e9e9e9;
|
||||
border-radius: 35px;
|
||||
}
|
||||
|
||||
&:focus {
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px rgba(208, 208, 208, 0.5);
|
||||
}
|
||||
|
||||
&.active {
|
||||
background-color: #d0d0d0;
|
||||
font-weight: bold;
|
||||
border-radius: 35px;
|
||||
|
||||
&:hover {
|
||||
background-color: #d0d0d0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#secondary-panel-content {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
width: 100%;
|
||||
min-height: 0;
|
||||
|
||||
ul {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
.tool-details-grid {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 2fr;
|
||||
gap: 20px;
|
||||
margin: 0 0 20px 0;
|
||||
align-items: start;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.tool-info {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
.tool-execution-area {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.tool-params {
|
||||
background-color: #ffffff;
|
||||
padding: 15px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid #ddd;
|
||||
|
||||
h5 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.tool-box {
|
||||
background-color: #ffffff;
|
||||
padding: 15px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid #eee;
|
||||
|
||||
h5 {
|
||||
color: var(--toolbox-blue);
|
||||
margin-top: 0;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
.params-header {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
margin-bottom: 8px;
|
||||
padding-right: 6px;
|
||||
font-weight: bold;
|
||||
font-size: 0.9em;
|
||||
color: var(--text-secondary-gray);
|
||||
}
|
||||
|
||||
.params-disclaimer {
|
||||
font-style: italic;
|
||||
color: var(--text-secondary-gray);
|
||||
font-size: 0.8em;
|
||||
margin-bottom: 10px;
|
||||
width: 100%;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
.param-item {
|
||||
margin-bottom: 12px;
|
||||
|
||||
label {
|
||||
display: block;
|
||||
margin-bottom: 4px;
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
&.disabled-param {
|
||||
> label {
|
||||
color: #888;
|
||||
text-decoration: line-through;
|
||||
}
|
||||
|
||||
.param-input-element {
|
||||
background-color: #f5f5f5;
|
||||
opacity: 0.6;
|
||||
}
|
||||
}
|
||||
|
||||
input[type="text"],
|
||||
input[type="number"],
|
||||
select,
|
||||
textarea {
|
||||
width: calc(100% - 12px);
|
||||
padding: 6px;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 4px;
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
input[type="checkbox"].param-input-element {
|
||||
width: auto;
|
||||
padding: 0;
|
||||
border: initial;
|
||||
border-radius: initial;
|
||||
vertical-align: middle;
|
||||
margin-right: 4px;
|
||||
accent-color: var(--toolbox-blue);
|
||||
flex-grow: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.input-checkbox-wrapper {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.param-input-element-container {
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.param-input-element {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.include-param-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
white-space: nowrap;
|
||||
|
||||
input[type="checkbox"] {
|
||||
width: auto;
|
||||
padding: 0;
|
||||
border: initial;
|
||||
border-radius: initial;
|
||||
vertical-align: middle;
|
||||
margin-right: 0;
|
||||
accent-color: var(--toolbox-blue);
|
||||
}
|
||||
}
|
||||
|
||||
.include-param-container input[type="checkbox"] {
|
||||
width: auto;
|
||||
padding: 0;
|
||||
border: initial;
|
||||
border-radius: initial;
|
||||
vertical-align: middle;
|
||||
margin: 0;
|
||||
accent-color: var(--toolbox-blue);
|
||||
}
|
||||
|
||||
.checkbox-bool-label {
|
||||
margin-left: 5px;
|
||||
font-style: italic;
|
||||
color: var(--text-primary-gray);
|
||||
}
|
||||
|
||||
.checkbox-bool-label.disabled {
|
||||
color: #aaa;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.param-label-extras {
|
||||
font-style: italic;
|
||||
font-weight: lighter;
|
||||
color: var(--text-secondary-gray);
|
||||
}
|
||||
|
||||
.auth-param-input {
|
||||
background-color: var(--section-border);
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.run-button-container {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
.header-modal {
|
||||
display: none;
|
||||
position: fixed;
|
  z-index: 1000;
  left: 0;
  top: 0;
  width: 100%;
  height: 100%;
  overflow: auto;
  background-color: rgba(0,0,0,0.4);

  li {
    margin-bottom: 10px;
  }

  .header-modal-content {
    background-color: #fefefe;
    margin: 10% auto;
    padding: 20px;
    border: 1px solid #888;
    width: 80%;
    max-width: 50%;
    border-radius: 8px;
    display: flex;
    flex-direction: column;
    gap: 15px;
    align-items: center;

    h5 {
      margin-top: 0;
      font-size: 1.2em;
    }

    .headers-textarea {
      width: calc(100% - 16px);
      padding: 8px;
      font-family: monospace;
      border: 1px solid #ccc;
      border-radius: 4px;
      min-height: 150px;
    }

    .header-modal-actions {
      display: flex;
      justify-content: center;
      gap: 30px;
      width: 100%;
    }

    .auth-token-details {
      width: 100%;
      max-width: calc(100% - 16px);
      margin-left: 8px;
      margin-right: 8px;

      summary {
        cursor: pointer;
        text-align: left;
        padding: 5px 0;
      }

      .auth-token-content {
        padding: 10px;
        border: 1px solid #eee;
        margin-top: 5px;
        background-color: #f9f9f9;
        text-align: left;
        max-width: 100%;
        overflow-wrap: break-word;

        .auth-tab-group {
          display: flex;
          border-bottom: 1px solid #ccc;
          margin-bottom: 10px;
        }

        .auth-tab-picker {
          padding: 8px 12px;
          cursor: pointer;
          border: 1px solid transparent;
          border-bottom: 1px solid transparent;
          margin-bottom: -1px;
          background-color: #f0f0f0;

          &.active {
            background-color: #fff;
            border-color: #ccc;
            border-bottom-color: #fff;
            font-weight: bold;
          }
        }

        .auth-tab-content {
          display: none;
          overflow-wrap: break-word;
          word-wrap: break-word;
          max-width: 100%;

          &.active {
            display: block;
          }

          pre {
            white-space: pre-wrap;
            word-wrap: break-word;
            overflow-x: auto;
            background-color: #f5f5f5;
            padding: 10px;
            border: 1px solid #ccc;
            border-radius: 4px;
            max-width: 100%;

            code {
              display: block;
              word-wrap: break-word;
              color: inherit;
            }
          }
        }
      }
    }
  }
}

.auth-method-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 8px 12px;
}

.auth-method-label {
  font-weight: 500;
  color: var(--text-primary-gray);
  word-break: break-word;
}

.auth-helper-section {
  border: 1px solid var(--section-border);
  background-color: transparent;
  padding: 16px;
  border-radius: 8px;
  margin-top: 20px;
  width: 80%;
}

.auth-method-list {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.auth-method-details {
  padding: 16px;
  border: 1px solid var(--section-border);
  border-radius: 4px;
  margin-bottom: 16px;
  background-color: #fff;
  overflow-x: auto;
}

.auth-controls {
  display: flex;
  flex-direction: column;
  gap: 16px;
  align-items: flex-start;
}

.auth-input-row {
  display: flex;
  flex-direction: column;
  gap: 6px;

  & label {
    font-size: 14px;
    color: var(--text-primary-gray);
    margin-bottom: 4px;
  }
}

.auth-input {
  padding: 8px 8px;
  border: 1px solid #bdc1c6;
  border-radius: 4px;
  font-size: 14px;
  width: 100%;
  box-sizing: border-box;

  &:focus {
    outline: none;
    border-color: var(--toolbox-blue);
    box-shadow: 0 0 0 1px #1a73e8;
  }
}

.auth-method-actions {
  display: flex;
  align-items: center;
  gap: 12px;
}

.auth-instructions {
  font-size: 0.8em;
  margin-top: 5px;
  color: var(--text-secondary-gray);
}

.tool-response {
  margin: 20px 0 0 0;

  textarea {
    width: 100%;
    min-height: 150px;
    padding: 12px;
    border: 1px solid #ddd;
    border-radius: 4px;
    font-family: monospace;
  }
}

.search-container {
  display: flex;
  width: 100%;
  margin-bottom: 15px;

  #toolset-search-input {
    flex-grow: 1;
    padding: 10px 12px;
    border: 1px solid #ccc;
    border-radius: 20px 0 0 20px;
    border-right: none;
    font-family: inherit;
    font-size: 0.9em;
    color: var(--text-primary-gray);

    &:focus {
      outline: none;
      border-color: var(--toolbox-blue);
      box-shadow: 0 0 0 2px rgba(66, 133, 244, 0.3);
    }

    &::placeholder {
      color: var(--text-secondary-gray);
    }
  }

  #toolset-search-button {
    padding: 10px 15px;
    border: 1px solid var(--button-primary);
    background-color: var(--button-primary);
    color: white;
    border-radius: 0 20px 20px 0;
    cursor: pointer;
    font-family: inherit;
    font-size: 0.9em;
    font-weight: bold;
    transition: opacity 0.2s ease-in-out;
    flex-shrink: 0;
    line-height: 1;

    &:hover {
      opacity: 0.8;
    }

    &:focus {
      outline: none;
      box-shadow: 0 0 0 2px rgba(66, 133, 244, 0.3);
    }
  }
}

.toggle-details-tab {
  background-color: transparent;
  color: var(--toolbox-blue);
  border: none;
  padding: 8px 12px;
  border-radius: 4px;
  cursor: pointer;
  font-size: 1em;
  font-weight: bold;

  &:hover {
    opacity: 0.8;
  }
}
24
internal/server/static/index.html
Normal file
@@ -0,0 +1,24 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Toolbox UI</title>
  <link rel="stylesheet" href="/ui/css/style.css">
</head>
<body>
  <div id="navbar-container" data-active-nav=""></div>
  <div id="main-content-container"></div>

  <script src="/ui/js/navbar.js"></script>
  <script src="/ui/js/mainContent.js"></script>
  <script>
    document.addEventListener('DOMContentLoaded', () => {
      const navbarContainer = document.getElementById('navbar-container');
      const activeNav = navbarContainer.getAttribute('data-active-nav');
      renderNavbar('navbar-container', activeNav);
      renderMainContent('main-content-container', '')
    });
  </script>
</body>
</html>
173
internal/server/static/js/auth.js
Normal file
@@ -0,0 +1,173 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * Renders the Google Sign-In button using the GIS library.
 * @param {string} toolId The ID of the tool.
 * @param {string} clientId The Google OAuth Client ID.
 * @param {string} authProfileName The name of the auth service in tools file.
 */
function renderGoogleSignInButton(toolId, clientId, authProfileName) {
  const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`;
  const GIS_CONTAINER_ID = `gisContainer-${UNIQUE_ID_BASE}`;
  const gisContainer = document.getElementById(GIS_CONTAINER_ID);
  const setupGisBtn = document.querySelector(`#google-auth-details-${UNIQUE_ID_BASE} .btn--setup-gis`);

  if (!gisContainer) {
    console.error('GIS container not found:', GIS_CONTAINER_ID);
    return;
  }

  if (!clientId) {
    alert('Please enter an OAuth Client ID first.');
    return;
  }

  // hide the setup button and show the container for the GIS button
  if (setupGisBtn) setupGisBtn.style.display = 'none';
  gisContainer.innerHTML = '';
  gisContainer.style.display = 'flex';
  if (window.google && window.google.accounts && window.google.accounts.id) {
    try {
      const handleResponse = (response) => handleCredentialResponse(response, toolId, authProfileName);
      window.google.accounts.id.initialize({
        client_id: clientId,
        callback: handleResponse,
        auto_select: false
      });
      window.google.accounts.id.renderButton(
        gisContainer,
        { theme: "outline", size: "large", text: "signin_with" }
      );
    } catch (error) {
      console.error("Error initializing Google Sign-In:", error);
      alert("Error initializing Google Sign-In. Check the Client ID and browser console.");
      gisContainer.innerHTML = '<p style="color: red;">Error loading Sign-In button.</p>';
      if (setupGisBtn) setupGisBtn.style.display = '';
    }
  } else {
    console.error("GIS library not fully loaded yet.");
    alert("Google Identity Services library not ready. Please try again in a moment.");
    gisContainer.innerHTML = '<p style="color: red;">GIS library not ready.</p>';
    if (setupGisBtn) setupGisBtn.style.display = '';
  }
}

/**
 * Handles the credential response from the Google Sign-In library.
 * @param {!CredentialResponse} response The credential response object from GIS.
 * @param {string} toolId The ID of the tool.
 * @param {string} authProfileName The name of the auth service in tools file.
 */
function handleCredentialResponse(response, toolId, authProfileName) {
  console.debug("handleCredentialResponse called with:", { response, toolId, authProfileName });
  const headersTextarea = document.getElementById(`headers-textarea-${toolId}`);
  if (!headersTextarea) {
    console.error('Headers textarea not found for toolId:', toolId);
    return;
  }

  const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`;
  const setupGisBtn = document.querySelector(`#google-auth-details-${UNIQUE_ID_BASE} .btn--setup-gis`);
  const gisContainer = document.getElementById(`gisContainer-${UNIQUE_ID_BASE}`);

  if (response.credential) {
    const idToken = response.credential;

    try {
      let currentHeaders = {};
      if (headersTextarea.value) {
        currentHeaders = JSON.parse(headersTextarea.value);
      }
      const HEADER_KEY = `${authProfileName}_token`;
      currentHeaders[HEADER_KEY] = `${idToken}`;
      headersTextarea.value = JSON.stringify(currentHeaders, null, 2);

      if (gisContainer) gisContainer.style.display = 'none';
      if (setupGisBtn) setupGisBtn.style.display = '';

    } catch (e) {
      alert('Headers are not valid JSON. Please correct and try again.');
      console.error("Header JSON parse error:", e);
    }
  } else {
    console.error("Error: No credential in response", response);
    alert('Error: No ID Token received. Check console for details.');

    if (gisContainer) gisContainer.style.display = 'none';
    if (setupGisBtn) setupGisBtn.style.display = '';
  }
}

// creates the Google Auth method dropdown
export function createGoogleAuthMethodItem(toolId, authProfileName) {
  const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`;
  const item = document.createElement('div');

  item.className = 'auth-method-item';
  item.innerHTML = `
    <div class="auth-method-header">
      <span class="auth-method-label">Google ID Token (${authProfileName})</span>
      <button class="toggle-details-tab">Auto Setup</button>
    </div>
    <div class="auth-method-details" id="google-auth-details-${UNIQUE_ID_BASE}" style="display: none;">
      <div class="auth-controls">
        <div class="auth-input-row">
          <label for="clientIdInput-${UNIQUE_ID_BASE}">OAuth Client ID:</label>
          <input type="text" id="clientIdInput-${UNIQUE_ID_BASE}" placeholder="Enter Client ID" class="auth-input">
        </div>
        <div class="auth-instructions">
          <strong>Action Required:</strong> Add this URL (e.g., http://localhost:PORT) to the Client ID's <strong>Authorized JavaScript origins</strong> to avoid a 401 error. If using Google OAuth,
          navigate to <a href="https://console.cloud.google.com/apis/credentials" target="_blank">Google Cloud Console</a> for this setting.
        </div>
        <div class="auth-method-actions">
          <button class="btn btn--setup-gis">Continue</button>
          <div id="gisContainer-${UNIQUE_ID_BASE}" class="auth-interactive-element gis-container" style="display: none;"></div>
        </div>
      </div>
    </div>
  `;

  const toggleBtn = item.querySelector('.toggle-details-tab');
  const detailsDiv = item.querySelector(`#google-auth-details-${UNIQUE_ID_BASE}`);
  const setupGisBtn = item.querySelector('.btn--setup-gis');
  const clientIdInput = item.querySelector(`#clientIdInput-${UNIQUE_ID_BASE}`);
  const gisContainer = item.querySelector(`#gisContainer-${UNIQUE_ID_BASE}`);

  toggleBtn.addEventListener('click', () => {
    const isVisible = detailsDiv.style.display === 'flex';
    detailsDiv.style.display = isVisible ? 'none' : 'flex';
    toggleBtn.textContent = isVisible ? 'Auto Setup' : 'Close';
    if (!isVisible) {
      if (gisContainer) {
        gisContainer.innerHTML = '';
        gisContainer.style.display = 'none';
      }
      if (setupGisBtn) {
        setupGisBtn.style.display = '';
      }
    }
  });

  setupGisBtn.addEventListener('click', () => {
    const clientId = clientIdInput.value;
    if (!clientId) {
      alert('Please enter an OAuth Client ID first.');
      return;
    }
    renderGoogleSignInButton(toolId, clientId, authProfileName);
  });

  return item;
}
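For reference, the end result of a successful sign-in is one extra key in the headers textarea, written by handleCredentialResponse as `${authProfileName}_token`. The auth service name ("my-google-auth") and token value below are placeholders for illustration, not values defined in this change:

const exampleHeaders = {
  "Content-Type": "application/json",
  // key is the auth service name followed by "_token"; value is the raw Google ID token
  "my-google-auth_token": "eyJhbGciOiJSUzI1NiIs..."
};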
174
internal/server/static/js/loadTools.js
Normal file
@@ -0,0 +1,174 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { renderToolInterface } from "./toolDisplay.js";

let toolDetailsAbortController = null;

/**
 * Fetches a toolset from the /api/toolset endpoint and initiates creating the tool list.
 * @param {!HTMLElement} secondNavContent The HTML element where the tool list will be rendered.
 * @param {!HTMLElement} toolDisplayArea The HTML element where the details of a selected tool will be displayed.
 * @param {string} toolsetName The name of the toolset to load (empty string loads all tools).
 * @returns {!Promise<void>} A promise that resolves when the tools are loaded and rendered, or rejects on error.
 */
export async function loadTools(secondNavContent, toolDisplayArea, toolsetName) {
  secondNavContent.innerHTML = '<p>Fetching tools...</p>';
  try {
    const response = await fetch(`/api/toolset/${toolsetName}`);
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    const apiResponse = await response.json();
    renderToolList(apiResponse, secondNavContent, toolDisplayArea);
  } catch (error) {
    console.error('Failed to load tools:', error);
    secondNavContent.innerHTML = `<p class="error">Failed to load tools: <pre><code>${error}</code></pre></p>`;
  }
}

/**
 * Renders the list of tools as buttons within the provided HTML element.
 * @param {?{tools: ?Object<string,*>} } apiResponse The API response object containing the tools.
 * @param {!HTMLElement} secondNavContent The HTML element to render the tool list into.
 * @param {!HTMLElement} toolDisplayArea The HTML element for displaying tool details (passed to event handlers).
 */
function renderToolList(apiResponse, secondNavContent, toolDisplayArea) {
  secondNavContent.innerHTML = '';

  if (!apiResponse || typeof apiResponse.tools !== 'object' || apiResponse.tools === null) {
    console.error('Error: Expected an object with a "tools" property, but received:', apiResponse);
    secondNavContent.textContent = 'Error: Invalid response format from toolset API.';
    return;
  }

  const toolsObject = apiResponse.tools;
  const toolNames = Object.keys(toolsObject);

  if (toolNames.length === 0) {
    secondNavContent.textContent = 'No tools found.';
    return;
  }

  const ul = document.createElement('ul');
  toolNames.forEach(toolName => {
    const li = document.createElement('li');
    const button = document.createElement('button');
    button.textContent = toolName;
    button.dataset.toolname = toolName;
    button.classList.add('tool-button');
    button.addEventListener('click', (event) => handleToolClick(event, secondNavContent, toolDisplayArea));
    li.appendChild(button);
    ul.appendChild(li);
  });
  secondNavContent.appendChild(ul);
}

/**
 * Handles the click event on a tool button.
 * @param {!Event} event The click event object.
 * @param {!HTMLElement} secondNavContent The parent element containing the tool buttons.
 * @param {!HTMLElement} toolDisplayArea The HTML element where tool details will be shown.
 */
function handleToolClick(event, secondNavContent, toolDisplayArea) {
  const toolName = event.target.dataset.toolname;
  if (toolName) {
    const currentActive = secondNavContent.querySelector('.tool-button.active');
    if (currentActive) {
      currentActive.classList.remove('active');
    }
    event.target.classList.add('active');
    fetchToolDetails(toolName, toolDisplayArea);
  }
}

/**
 * Fetches details for a specific tool from the /api/tool endpoint.
 * It aborts any previous in-flight request for tool details to prevent race conditions.
 * @param {string} toolName The name of the tool to fetch details for.
 * @param {!HTMLElement} toolDisplayArea The HTML element to display the tool interface in.
 * @returns {!Promise<void>} A promise that resolves when the tool details are fetched and rendered, or rejects on error.
 */
async function fetchToolDetails(toolName, toolDisplayArea) {
  if (toolDetailsAbortController) {
    toolDetailsAbortController.abort();
    console.debug("Aborted previous tool fetch.");
  }

  toolDetailsAbortController = new AbortController();
  const signal = toolDetailsAbortController.signal;

  toolDisplayArea.innerHTML = '<p>Loading tool details...</p>';

  try {
    const response = await fetch(`/api/tool/${encodeURIComponent(toolName)}`, { signal });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    const apiResponse = await response.json();

    if (!apiResponse.tools || !apiResponse.tools[toolName]) {
      throw new Error(`Tool "${toolName}" data not found in API response.`);
    }
    const toolObject = apiResponse.tools[toolName];
    console.debug("Received tool object: ", toolObject)

    const toolInterfaceData = {
      id: toolName,
      name: toolName,
      description: toolObject.description || "No description provided.",
      authRequired: toolObject.authRequired || [],
      parameters: (toolObject.parameters || []).map(param => {
        let inputType = 'text';
        const apiType = param.type ? param.type.toLowerCase() : 'string';
        let valueType = 'string';
        let label = param.description || param.name;

        if (apiType === 'integer' || apiType === 'float') {
          inputType = 'number';
          valueType = 'number';
        } else if (apiType === 'boolean') {
          inputType = 'checkbox';
          valueType = 'boolean';
        } else if (apiType === 'array') {
          inputType = 'textarea';
          const itemType = param.items && param.items.type ? param.items.type.toLowerCase() : 'string';
          valueType = `array<${itemType}>`;
          label += ' (Array)';
        }

        return {
          name: param.name,
          type: inputType,
          valueType: valueType,
          label: label,
          authServices: param.authSources,
          required: param.required || false,
          // defaultValue: param.default, can't do this yet bc tool manifest doesn't have default
        };
      })
    };

    console.debug("Transformed toolInterfaceData:", toolInterfaceData);

    renderToolInterface(toolInterfaceData, toolDisplayArea);
  } catch (error) {
    if (error.name === 'AbortError') {
      console.debug("Previous fetch was aborted, expected behavior.");
    } else {
      console.error(`Failed to load details for tool "${toolName}":`, error);
      toolDisplayArea.innerHTML = `<p class="error">Failed to load details for ${toolName}. ${error.message}</p>`;
    }
  }
}
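A minimal sketch of the toolset API response shape this renderer assumes, inferred from the field accesses above (tools map keyed by name, description, authRequired, parameters with items/authSources); the concrete tool and parameter names are invented for illustration:

const exampleApiResponse = {
  tools: {
    'search-hotels': {                       // hypothetical tool name
      description: 'Search for hotels by name.',
      authRequired: [],
      parameters: [
        { name: 'name', type: 'string', required: true, description: 'Hotel name' },
        { name: 'ids', type: 'array', items: { type: 'integer' }, required: false }
      ]
    }
  }
};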
40
internal/server/static/js/mainContent.js
Normal file
@@ -0,0 +1,40 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * Renders the main content area into the HTML.
 * @param {string} containerId The ID of the DOM element to inject the content into.
 * @param {string} idString The id of the item inside the main content area.
 */
function renderMainContent(containerId, idString) {
  const mainContentContainer = document.getElementById(containerId);
  if (!mainContentContainer) {
    console.error(`Content container with ID "${containerId}" not found.`);
    return;
  }

  const idAttribute = idString ? `id="${idString}"` : '';
  const contentHTML = `
    <div class="main-content-area">
      <div class="top-bar">
      </div>
      <main class="content" ${idAttribute}>
        <h1>Welcome to MCP Toolbox UI</h1>
        <p>This is the main content area. Click a tab on the left to navigate.</p>
      </main>
    </div>
  `;

  mainContentContainer.innerHTML = contentHTML;
}
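Both call sites in this change pass the same container but different ids for the inner main element, e.g.:

renderMainContent('main-content-container', 'tool-display-area'); // tools.html / toolsets.html
renderMainContent('main-content-container', '');                  // index.html, no id attribute emitted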
53
internal/server/static/js/navbar.js
Normal file
@@ -0,0 +1,53 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * Renders the navigation bar HTML content into the specified container element.
 * @param {string} containerId The ID of the DOM element to inject the navbar into.
 * @param {string | null} activePath The active tab from the navbar.
 */
function renderNavbar(containerId, activePath) {
  const navbarContainer = document.getElementById(containerId);
  if (!navbarContainer) {
    console.error(`Navbar container with ID "${containerId}" not found.`);
    return;
  }

  const navbarHTML = `
    <nav class="left-nav">
      <div class="nav-logo">
        <img src="/ui/assets/mcptoolboxlogo.png" alt="App Logo">
      </div>
      <ul>
        <!--<li><a href="/ui/sources">Sources</a></li>-->
        <!--<li><a href="/ui/authservices">Auth Services</a></li>-->
        <li><a href="/ui/tools">Tools</a></li>
        <li><a href="/ui/toolsets">Toolsets</a></li>
      </ul>
    </nav>
  `;

  navbarContainer.innerHTML = navbarHTML;
  if (activePath) {
    const navLinks = navbarContainer.querySelectorAll('.left-nav ul li a');
    navLinks.forEach(link => {
      const linkPath = new URL(link.href).pathname;
      if (linkPath === activePath) {
        link.classList.add('active');
      } else {
        link.classList.remove('active');
      }
    });
  }
}
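Usage as wired up in the static pages below, where the active path comes from the page's data-active-nav attribute; the link whose pathname matches is highlighted:

renderNavbar('navbar-container', '/ui/tools'); // marks the Tools link active
renderNavbar('navbar-container', null);        // renders the navbar with no active link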
162
internal/server/static/js/runTool.js
Normal file
@@ -0,0 +1,162 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { isParamIncluded } from "./toolDisplay.js";

/**
 * Runs a specific tool using the /api/tool/{toolName}/invoke endpoint.
 * @param {string} toolId The unique identifier for the tool.
 * @param {!HTMLFormElement} form The form element containing parameter inputs.
 * @param {!HTMLTextAreaElement} responseArea The textarea to display results or errors.
 * @param {!Array<!Object>} parameters An array of parameter definition objects
 * @param {!HTMLInputElement} prettifyCheckbox The checkbox to control JSON formatting.
 * @param {function(?Object): void} updateLastResults Callback to store the last results.
 * @param {!Object<string, string>} headers The request headers to send with the invocation.
 */
export async function handleRunTool(toolId, form, responseArea, parameters, prettifyCheckbox, updateLastResults, headers) {
  const formData = new FormData(form);
  const typedParams = {};
  responseArea.value = 'Running tool...';
  updateLastResults(null);

  for (const param of parameters) {
    const NAME = param.name;
    const VALUE_TYPE = param.valueType;
    const RAW_VALUE = formData.get(NAME);
    const INCLUDE_CHECKED = isParamIncluded(toolId, NAME)

    try {
      if (!INCLUDE_CHECKED) {
        console.debug(`Param ${NAME} was intentionally skipped.`)
        // if param was purposely unchecked, don't include it in body
        continue;
      }

      if (VALUE_TYPE === 'boolean') {
        typedParams[NAME] = RAW_VALUE !== null;
        console.debug(`Parameter ${NAME} (boolean) set to: ${typedParams[NAME]}`);
        continue;
      }

      // process remaining types
      if (VALUE_TYPE && VALUE_TYPE.startsWith('array<')) {
        typedParams[NAME] = parseArrayParameter(RAW_VALUE, VALUE_TYPE, NAME);
      } else {
        switch (VALUE_TYPE) {
          case 'number':
            if (RAW_VALUE === "") {
              console.debug(`Param ${NAME} was empty, setting to empty string.`)
              typedParams[NAME] = "";
            } else {
              const num = Number(RAW_VALUE);
              if (isNaN(num)) {
                throw new Error(`Invalid number input for ${NAME}: ${RAW_VALUE}`);
              }
              typedParams[NAME] = num;
            }
            break;
          case 'string':
          default:
            typedParams[NAME] = RAW_VALUE;
            break;
        }
      }
    } catch (error) {
      console.error('Error processing parameter:', NAME, error);
      responseArea.value = `Error for ${NAME}: ${error.message}`;
      return;
    }
  }

  console.debug('Running tool:', toolId, 'with typed params:', typedParams);
  try {
    const response = await fetch(`/api/tool/${toolId}/invoke`, {
      method: 'POST',
      headers: headers,
      body: JSON.stringify(typedParams)
    });
    if (!response.ok) {
      const errorBody = await response.text();
      throw new Error(`HTTP error ${response.status}: ${errorBody}`);
    }
    const results = await response.json();
    updateLastResults(results);
    displayResults(results, responseArea, prettifyCheckbox.checked);
  } catch (error) {
    console.error('Error running tool:', error);
    responseArea.value = `Error: ${error.message}`;
    updateLastResults(null);
  }
}

/**
 * Parses and validates a single array parameter from a raw string value.
 * @param {string} rawValue The raw string value from FormData.
 * @param {string} valueType The full array type string (e.g., "array<number>").
 * @param {string} paramName The name of the parameter for error messaging.
 * @return {!Array<*>} The parsed array.
 * @throws {Error} If parsing or type validation fails.
 */
function parseArrayParameter(rawValue, valueType, paramName) {
  const ELEMENT_TYPE = valueType.substring(6, valueType.length - 1);
  let parsedArray;
  try {
    parsedArray = JSON.parse(rawValue);
  } catch (e) {
    throw new Error(`Invalid JSON format for ${paramName}. Expected an array. ${e.message}`);
  }

  if (!Array.isArray(parsedArray)) {
    throw new Error(`Input for ${paramName} must be a JSON array (e.g., ["a", "b"]).`);
  }

  return parsedArray.map((item, index) => {
    switch (ELEMENT_TYPE) {
      case 'number':
        const NUM = Number(item);
        if (isNaN(NUM)) {
          throw new Error(`Invalid number "${item}" found in array for ${paramName} at index ${index}.`);
        }
        return NUM;
      case 'boolean':
        return item === true || String(item).toLowerCase() === 'true';
      case 'string':
      default:
        return item;
    }
  });
}

/**
 * Displays the results from the tool run in the response area.
 */
export function displayResults(results, responseArea, prettify) {
  if (results === null || results === undefined) {
    return;
  }
  try {
    const resultJson = JSON.parse(results.result);
    if (prettify) {
      responseArea.value = JSON.stringify(resultJson, null, 2);
    } else {
      responseArea.value = JSON.stringify(resultJson);
    }
  } catch (error) {
    console.error("Error parsing or stringifying results:", error);
    if (typeof results.result === 'string') {
      responseArea.value = results.result;
    } else {
      responseArea.value = "Error displaying results. Invalid format.";
    }
  }
}
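A couple of worked examples of the array handling above (the parameter name 'ids' is hypothetical):

parseArrayParameter('[1, "2", 3]', 'array<number>', 'ids');    // -> [1, 2, 3]
parseArrayParameter('"not an array"', 'array<number>', 'ids'); // throws: Input for ids must be a JSON array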
531
internal/server/static/js/toolDisplay.js
Normal file
@@ -0,0 +1,531 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { handleRunTool, displayResults } from './runTool.js';
import { createGoogleAuthMethodItem } from './auth.js'

/**
 * Helper function to create form inputs for parameters.
 */
function createParamInput(param, toolId) {
  const paramItem = document.createElement('div');
  paramItem.className = 'param-item';

  const label = document.createElement('label');
  const INPUT_ID = `param-${toolId}-${param.name}`;
  const NAME_TEXT = document.createTextNode(param.name);
  label.setAttribute('for', INPUT_ID);
  label.appendChild(NAME_TEXT);

  const IS_AUTH_PARAM = param.authServices && param.authServices.length > 0;
  let additionalLabelText = '';
  if (IS_AUTH_PARAM) {
    additionalLabelText += ' (auth)';
  }
  if (!param.required) {
    additionalLabelText += ' (optional)';
  }

  if (additionalLabelText) {
    const additionalSpan = document.createElement('span');
    additionalSpan.textContent = additionalLabelText;
    additionalSpan.classList.add('param-label-extras');
    label.appendChild(additionalSpan);
  }
  paramItem.appendChild(label);

  const inputCheckboxWrapper = document.createElement('div');
  const inputContainer = document.createElement('div');
  inputCheckboxWrapper.className = 'input-checkbox-wrapper';
  inputContainer.className = 'param-input-element-container';

  // Build parameter's value input box.
  const PLACEHOLDER_LABEL = param.label;
  let inputElement;
  let boolValueLabel = null;

  if (param.type === 'textarea') {
    inputElement = document.createElement('textarea');
    inputElement.rows = 3;
    inputContainer.appendChild(inputElement);
  } else if (param.type === 'checkbox') {
    inputElement = document.createElement('input');
    inputElement.type = 'checkbox';
    inputElement.title = PLACEHOLDER_LABEL;
    inputElement.checked = false;

    // handle true/false label for boolean params
    boolValueLabel = document.createElement('span');
    boolValueLabel.className = 'checkbox-bool-label';
    boolValueLabel.textContent = inputElement.checked ? ' true' : ' false';

    inputContainer.appendChild(inputElement);
    inputContainer.appendChild(boolValueLabel);

    inputElement.addEventListener('change', () => {
      boolValueLabel.textContent = inputElement.checked ? ' true' : ' false';
    });
  } else {
    inputElement = document.createElement('input');
    inputElement.type = param.type;
    inputContainer.appendChild(inputElement);
  }

  inputElement.id = INPUT_ID;
  inputElement.name = param.name;
  inputElement.classList.add('param-input-element');

  if (IS_AUTH_PARAM) {
    inputElement.disabled = true;
    inputElement.classList.add('auth-param-input');
    if (param.type !== 'checkbox') {
      inputElement.placeholder = param.authServices;
    }
  } else if (param.type !== 'checkbox') {
    inputElement.placeholder = PLACEHOLDER_LABEL ? PLACEHOLDER_LABEL.trim() : '';
  }
  inputCheckboxWrapper.appendChild(inputContainer);

  // create the "Include Param" checkbox
  const INCLUDE_CHECKBOX_ID = `include-${INPUT_ID}`;
  const includeContainer = document.createElement('div');
  const includeCheckbox = document.createElement('input');

  includeContainer.className = 'include-param-container';
  includeCheckbox.type = 'checkbox';
  includeCheckbox.id = INCLUDE_CHECKBOX_ID;
  includeCheckbox.name = `include-${param.name}`;
  includeCheckbox.title = 'Include this parameter'; // Add a tooltip

  // default to checked, unless it's an optional parameter
  includeCheckbox.checked = param.required;

  includeContainer.appendChild(includeCheckbox);
  inputCheckboxWrapper.appendChild(includeContainer);

  paramItem.appendChild(inputCheckboxWrapper);

  // function to update UI based on checkbox state
  const updateParamIncludedState = () => {
    const isIncluded = includeCheckbox.checked;
    if (isIncluded) {
      paramItem.classList.remove('disabled-param');
      if (!IS_AUTH_PARAM) {
        inputElement.disabled = false;
      }
      if (boolValueLabel) {
        boolValueLabel.classList.remove('disabled');
      }
    } else {
      paramItem.classList.add('disabled-param');
      inputElement.disabled = true;
      if (boolValueLabel) {
        boolValueLabel.classList.add('disabled');
      }
    }
  };

  // add event listener to the include checkbox
  includeCheckbox.addEventListener('change', updateParamIncludedState);
  updateParamIncludedState();

  return paramItem;
}

/**
 * Function to create the header editor popup modal.
 * @param {string} toolId The unique identifier for the tool.
 * @param {!Object<string, string>} currentHeaders The current headers.
 * @param {!Array<!Object>} toolParameters The tool's parameter definitions, used to collect auth services.
 * @param {!Array<string>} authRequired Auth services required to invoke the tool.
 * @param {function(!Object<string, string>): void} saveCallback A function to be
 *     called when the "Save" button is clicked and the headers are successfully
 *     parsed. The function receives the updated headers object as its argument.
 * @return {!HTMLDivElement} The outermost div element of the created modal.
 */
function createHeaderEditorModal(toolId, currentHeaders, toolParameters, authRequired, saveCallback) {
  const MODAL_ID = `header-modal-${toolId}`;
  let modal = document.getElementById(MODAL_ID);

  if (modal) {
    modal.remove();
  }

  modal = document.createElement('div');
  modal.id = MODAL_ID;
  modal.className = 'header-modal';

  const modalContent = document.createElement('div');
  const modalHeader = document.createElement('h5');
  const headersTextarea = document.createElement('textarea');

  modalContent.className = 'header-modal-content';
  modalHeader.textContent = 'Edit Request Headers';
  headersTextarea.id = `headers-textarea-${toolId}`;
  headersTextarea.className = 'headers-textarea';
  headersTextarea.rows = 10;
  headersTextarea.value = JSON.stringify(currentHeaders, null, 2);

  // handle authenticated params
  const authProfileNames = new Set();
  toolParameters.forEach(param => {
    const isAuthParam = param.authServices && param.authServices.length > 0;
    if (isAuthParam && param.authServices) {
      param.authServices.forEach(name => authProfileNames.add(name));
    }
  });

  // handle authorized invocations
  if (authRequired && authRequired.length > 0) {
    authRequired.forEach(name => authProfileNames.add(name));
  }

  modalContent.appendChild(modalHeader);
  modalContent.appendChild(headersTextarea);

  if (authProfileNames.size > 0 || authRequired.length > 0) {
    const authHelperSection = document.createElement('div');
    authHelperSection.className = 'auth-helper-section';
    const authList = document.createElement('div');
    authList.className = 'auth-method-list';

    authProfileNames.forEach(profileName => {
      const authItem = createGoogleAuthMethodItem(toolId, profileName);
      authList.appendChild(authItem);
    });
    authHelperSection.appendChild(authList);
    modalContent.appendChild(authHelperSection);
  }

  const modalActions = document.createElement('div');
  const closeButton = document.createElement('button');
  const saveButton = document.createElement('button');
  const authTokenDropdown = createAuthTokenInfoDropdown();

  modalActions.className = 'header-modal-actions';
  closeButton.textContent = 'Close';
  closeButton.className = 'btn btn--closeHeaders';
  closeButton.addEventListener('click', () => closeHeaderEditor(toolId));
  saveButton.textContent = 'Save';
  saveButton.className = 'btn btn--saveHeaders';
  saveButton.addEventListener('click', () => {
    try {
      const updatedHeaders = JSON.parse(headersTextarea.value);
      saveCallback(updatedHeaders);
      closeHeaderEditor(toolId);
    } catch (e) {
      alert('Invalid JSON format for headers.');
      console.error("Header JSON parse error:", e);
    }
  });

  modalActions.appendChild(closeButton);
  modalActions.appendChild(saveButton);
  modalContent.appendChild(modalActions);
  modalContent.appendChild(authTokenDropdown);
  modal.appendChild(modalContent);

  return modal;
}

/**
 * Function to open the header popup.
 */
function openHeaderEditor(toolId) {
  const modal = document.getElementById(`header-modal-${toolId}`);
  if (modal) {
    modal.style.display = 'block';
  }
}

/**
 * Function to close the header popup.
 */
function closeHeaderEditor(toolId) {
  const modal = document.getElementById(`header-modal-${toolId}`);
  if (modal) {
    modal.style.display = 'none';
  }
}

/**
 * Creates a dropdown element showing information on how to extract Google auth tokens.
 * @return {HTMLDetailsElement} The details element representing the dropdown.
 */
function createAuthTokenInfoDropdown() {
  const details = document.createElement('details');
  const summary = document.createElement('summary');
  const content = document.createElement('div');

  details.className = 'auth-token-details';
  details.appendChild(summary);
  summary.textContent = 'How to extract Google OAuth ID Token manually';
  content.className = 'auth-token-content';

  // auth instruction dropdown
  const tabButtons = document.createElement('div');
  const leftTab = document.createElement('button');
  const rightTab = document.createElement('button');

  tabButtons.className = 'auth-tab-group';
  leftTab.className = 'auth-tab-picker active';
  leftTab.textContent = 'With Standard Account';
  leftTab.setAttribute('data-tab', 'standard');
  rightTab.className = 'auth-tab-picker';
  rightTab.textContent = 'With Service Account';
  rightTab.setAttribute('data-tab', 'service');

  tabButtons.appendChild(leftTab);
  tabButtons.appendChild(rightTab);
  content.appendChild(tabButtons);

  const tabContentContainer = document.createElement('div');
  const standardAccInstructions = document.createElement('div');
  const serviceAccInstructions = document.createElement('div');

  standardAccInstructions.id = 'auth-tab-standard';
  standardAccInstructions.className = 'auth-tab-content active';
  standardAccInstructions.innerHTML = AUTH_TOKEN_INSTRUCTIONS_STANDARD;
  serviceAccInstructions.id = 'auth-tab-service';
  serviceAccInstructions.className = 'auth-tab-content';
  serviceAccInstructions.innerHTML = AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT;

  tabContentContainer.appendChild(standardAccInstructions);
  tabContentContainer.appendChild(serviceAccInstructions);
  content.appendChild(tabContentContainer);

  // switching tabs logic
  const tabBtns = [leftTab, rightTab];
  const tabContents = [standardAccInstructions, serviceAccInstructions];

  tabBtns.forEach(btn => {
    btn.addEventListener('click', () => {
      // deactivate all buttons and contents
      tabBtns.forEach(b => b.classList.remove('active'));
      tabContents.forEach(c => c.classList.remove('active'));

      btn.classList.add('active');

      const tabId = btn.getAttribute('data-tab');
      const activeContent = content.querySelector(`#auth-tab-${tabId}`);
      if (activeContent) {
        activeContent.classList.add('active');
      }
    });
  });

  details.appendChild(content);
  return details;
}

/**
 * Renders the tool display area.
 */
export function renderToolInterface(tool, containerElement) {
  const TOOL_ID = tool.id;
  containerElement.innerHTML = '';

  let lastResults = null;
  let currentHeaders = {
    "Content-Type": "application/json"
  };

  // function to update lastResults so we can toggle json
  const updateLastResults = (newResults) => {
    lastResults = newResults;
  };
  const updateCurrentHeaders = (newHeaders) => {
    currentHeaders = newHeaders;
    const newModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders);
    containerElement.appendChild(newModal);
  };

  const gridContainer = document.createElement('div');
  gridContainer.className = 'tool-details-grid';

  const toolInfoContainer = document.createElement('div');
  const nameBox = document.createElement('div');
  const descBox = document.createElement('div');

  nameBox.className = 'tool-box tool-name';
  nameBox.innerHTML = `<h5>Name:</h5><p>${tool.name}</p>`;
  descBox.className = 'tool-box tool-description';
  descBox.innerHTML = `<h5>Description:</h5><p>${tool.description}</p>`;

  toolInfoContainer.className = 'tool-info';
  toolInfoContainer.appendChild(nameBox);
  toolInfoContainer.appendChild(descBox);
  gridContainer.appendChild(toolInfoContainer);

  const DISCLAIMER_INFO = "*Checked parameters are sent with the value from their text field. Empty fields will be sent as an empty string. To exclude a parameter, uncheck it."
  const paramsContainer = document.createElement('div');
  const form = document.createElement('form');
  const paramsHeader = document.createElement('div');
  const disclaimerText = document.createElement('div');

  paramsContainer.className = 'tool-params tool-box';
  paramsContainer.innerHTML = '<h5>Parameters:</h5>';
  paramsHeader.className = 'params-header';
  paramsContainer.appendChild(paramsHeader);
  disclaimerText.textContent = DISCLAIMER_INFO;
  disclaimerText.className = 'params-disclaimer';
  paramsContainer.appendChild(disclaimerText);

  form.id = `tool-params-form-${TOOL_ID}`;

  tool.parameters.forEach(param => {
    form.appendChild(createParamInput(param, TOOL_ID));
  });
  paramsContainer.appendChild(form);
  gridContainer.appendChild(paramsContainer);

  containerElement.appendChild(gridContainer);

  const RESPONSE_AREA_ID = `tool-response-area-${TOOL_ID}`;
  const runButtonContainer = document.createElement('div');
  const editHeadersButton = document.createElement('button');
  const runButton = document.createElement('button');

  editHeadersButton.className = 'btn btn--editHeaders';
  editHeadersButton.textContent = 'Edit Headers';
  editHeadersButton.addEventListener('click', () => openHeaderEditor(TOOL_ID));
  runButtonContainer.className = 'run-button-container';
  runButtonContainer.appendChild(editHeadersButton);

  runButton.className = 'btn btn--run';
  runButton.textContent = 'Run Tool';
  runButtonContainer.appendChild(runButton);
  containerElement.appendChild(runButtonContainer);

  // response area (bottom)
  const responseContainer = document.createElement('div');
  const responseHeaderControls = document.createElement('div');
  const responseHeader = document.createElement('h5');
  const responseArea = document.createElement('textarea');

  responseContainer.className = 'tool-response tool-box';
  responseHeaderControls.className = 'response-header-controls';
  responseHeader.textContent = 'Response:';
  responseHeaderControls.appendChild(responseHeader);

  // prettify box
  const PRETTIFY_ID = `prettify-${TOOL_ID}`;
  const prettifyDiv = document.createElement('div');
  const prettifyLabel = document.createElement('label');
  const prettifyCheckbox = document.createElement('input');

  prettifyDiv.className = 'prettify-container';
  prettifyLabel.setAttribute('for', PRETTIFY_ID);
  prettifyLabel.textContent = 'Prettify JSON';
  prettifyLabel.className = 'prettify-label';

  prettifyCheckbox.type = 'checkbox';
  prettifyCheckbox.id = PRETTIFY_ID;
  prettifyCheckbox.checked = true;
  prettifyCheckbox.className = 'prettify-checkbox';

  prettifyDiv.appendChild(prettifyLabel);
  prettifyDiv.appendChild(prettifyCheckbox);

  responseHeaderControls.appendChild(prettifyDiv);
  responseContainer.appendChild(responseHeaderControls);

  responseArea.id = RESPONSE_AREA_ID;
  responseArea.readOnly = true;
  responseArea.placeholder = 'Results will appear here...';
  responseArea.className = 'tool-response-area';
  responseArea.rows = 10;
  responseContainer.appendChild(responseArea);

  containerElement.appendChild(responseContainer);

  // create and append the header editor modal
  const headerModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders);
  containerElement.appendChild(headerModal);

  prettifyCheckbox.addEventListener('change', () => {
    if (lastResults) {
      displayResults(lastResults, responseArea, prettifyCheckbox.checked);
    }
  });

  runButton.addEventListener('click', (event) => {
    event.preventDefault();
    handleRunTool(TOOL_ID, form, responseArea, tool.parameters, prettifyCheckbox, updateLastResults, currentHeaders);
  });
}

/**
 * Checks if a specific parameter is marked as included for a given tool.
 * @param {string} toolId The ID of the tool.
 * @param {string} paramName The name of the parameter.
 * @return {boolean|null} True if the parameter's include checkbox is checked,
 *     False if unchecked, Null if the checkbox element is not found.
 */
export function isParamIncluded(toolId, paramName) {
  const inputId = `param-${toolId}-${paramName}`;
  const includeCheckboxId = `include-${inputId}`;
  const includeCheckbox = document.getElementById(includeCheckboxId);

  if (includeCheckbox && includeCheckbox.type === 'checkbox') {
    return includeCheckbox.checked;
  }

  console.warn(`Include checkbox not found for ID: ${includeCheckboxId}`);
  return null;
}

// Templates for inserting token retrieval instructions into edit header modal
const AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT = `
<p>To obtain a Google OAuth ID token using a service account:</p>
<ol>
<li>Make sure you are on the intended SERVICE account (these typically contain iam.gserviceaccount.com). Verify by running the command below.
<pre><code>gcloud auth list</code></pre>
</li>
<li>Print an id token with the audience set to your clientID defined in tools file:
<pre><code>gcloud auth print-identity-token --audiences=YOUR_CLIENT_ID_HERE</code></pre>
</li>
<li>Copy the output token.</li>
<li>Paste this token into the headers JSON editor. The key should be the name of your auth service followed by <code>_token</code>
<pre><code>{
"Content-Type": "application/json",
"my-google-auth_token": "YOUR_ID_TOKEN_HERE"
} </code></pre>
</li>
</ol>
<p>This token is typically short-lived.</p>`;

const AUTH_TOKEN_INSTRUCTIONS_STANDARD = `
<p>To obtain a Google OAuth ID token using a standard account:</p>
<ol>
<li>Make sure you are on your intended standard account. Verify by running the command below.
<pre><code>gcloud auth list</code></pre>
</li>
<li>Within your Cloud Console, add the following link to the "Authorized Redirect URIs".</li>
<pre><code>https://developers.google.com/oauthplayground</code></pre>
<li>Go to the Google OAuth Playground site: <a href="https://developers.google.com/oauthplayground/" target="_blank">https://developers.google.com/oauthplayground/</a></li>
<li>In the top right settings menu, select "Use your own OAuth Credentials".</li>
<li>Input your clientID (from tools file), along with the client secret from Cloud Console.</li>
<li>Inside the Google OAuth Playground, select "Google OAuth2 API v2".</li>
<ul>
<li>Select "Authorize APIs".</li>
<li>Select "Exchange Authorization codes for tokens"</li>
<li>Copy the id_token field provided in the response.</li>
</ul>
<li>Paste this token into the headers JSON editor. The key should be the name of your auth service followed by <code>_token</code>
<pre><code>{
"Content-Type": "application/json",
"my-google-auth_token": "YOUR_ID_TOKEN_HERE"
} </code></pre>
</li>
</ol>
<p>This token is typically short-lived.</p>`;
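A minimal sketch of the tool object renderToolInterface expects, matching the shape produced by fetchToolDetails in loadTools.js; the tool name, labels, and parameters here are invented for illustration:

const exampleTool = {
  id: 'search-hotels',
  name: 'search-hotels',
  description: 'Search for hotels by name.',
  authRequired: [],
  parameters: [
    { name: 'name', type: 'text', valueType: 'string', label: 'Hotel name', required: true },
    { name: 'ids', type: 'textarea', valueType: 'array<integer>', label: 'Hotel ids (Array)', required: false }
  ]
};
renderToolInterface(exampleTool, document.getElementById('tool-display-area'));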
32
internal/server/static/js/tools.js
Normal file
@@ -0,0 +1,32 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { loadTools } from "./loadTools.js";

/**
 * This runs after the browser finishes loading and parsing the HTML structure.
 * This ensures that elements can be safely accessed.
 */
document.addEventListener('DOMContentLoaded', () => {
  const toolDisplayArea = document.getElementById('tool-display-area');
  const secondaryPanelContent = document.getElementById('secondary-panel-content');
  const DEFAULT_TOOLSET = ""; // empty string returns all tools

  if (!secondaryPanelContent || !toolDisplayArea) {
    console.error('Required DOM elements not found.');
    return;
  }

  loadTools(secondaryPanelContent, toolDisplayArea, DEFAULT_TOOLSET);
});
51
internal/server/static/js/toolsets.js
Normal file
@@ -0,0 +1,51 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { loadTools } from "./loadTools.js";

document.addEventListener('DOMContentLoaded', () => {
  const searchInput = document.getElementById('toolset-search-input');
  const searchButton = document.getElementById('toolset-search-button');
  const secondNavContent = document.getElementById('secondary-panel-content');
  const toolDisplayArea = document.getElementById('tool-display-area');

  if (!searchInput || !searchButton || !secondNavContent || !toolDisplayArea) {
    console.error('Required DOM elements not found.');
    return;
  }

  // Event listener for search button click
  searchButton.addEventListener('click', () => {
    toolDisplayArea.innerHTML = '';
    const toolsetName = searchInput.value.trim();
    if (toolsetName) {
      loadTools(secondNavContent, toolDisplayArea, toolsetName)
    } else {
      secondNavContent.innerHTML = '<p>Please enter a toolset name to see available tools. <br><br>To view the default toolset that consists of all tools, please select the "Tools" tab.</p>';
    }
  });

  // Event listener for Enter key in search input
  searchInput.addEventListener('keypress', (event) => {
    toolDisplayArea.innerHTML = '';
    if (event.key === 'Enter') {
      const toolsetName = searchInput.value.trim();
      if (toolsetName) {
        loadTools(secondNavContent, toolDisplayArea, toolsetName);
      } else {
        secondNavContent.innerHTML = '<p>Please enter a toolset name to see available tools. <br><br>To view the default toolset that consists of all tools, please select the "Tools" tab.</p>';
      }
    }
  });
})
34
internal/server/static/tools.html
Normal file
@@ -0,0 +1,34 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Tools View</title>
  <link rel="stylesheet" href="/ui/css/style.css">
  <script src="https://accounts.google.com/gsi/client" async defer></script>
</head>
<body>
  <div id="navbar-container" data-active-nav="/ui/tools"></div>

  <aside class="second-nav">
    <h4>My Tools</h4>
    <div id="secondary-panel-content">
      <p>Fetching tools...</p>
    </div>
  </aside>

  <div id="main-content-container"></div>

  <script type="module" src="/ui/js/tools.js"></script>
  <script src="/ui/js/navbar.js"></script>
  <script src="/ui/js/mainContent.js"></script>
  <script>
    document.addEventListener('DOMContentLoaded', () => {
      const navbarContainer = document.getElementById('navbar-container');
      const activeNav = navbarContainer.getAttribute('data-active-nav');
      renderNavbar('navbar-container', activeNav);
      renderMainContent('main-content-container', 'tool-display-area')
    });
  </script>
</body>
</html>
41
internal/server/static/toolsets.html
Normal file
@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Toolsets View</title>
  <link rel="stylesheet" href="/ui/css/style.css">
  <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
  <script src="https://accounts.google.com/gsi/client" async defer></script>
</head>
<body>
  <div id="navbar-container" data-active-nav="/ui/toolsets"></div>

  <aside class="second-nav">
    <h4>Retrieve Toolset</h4>
    <div class="search-container">
      <input type="text" id="toolset-search-input" placeholder="Enter toolset name...">
      <button id="toolset-search-button" aria-label="Retrieve Tools">
        <span class="material-icons">search</span>
      </button>
    </div>
    <div id="secondary-panel-content">
      <p>Retrieve toolset to see available tools.</p>
    </div>
  </aside>

  <div id="main-content-container"></div>

  <script type="module" src="/ui/js/toolsets.js"></script>
  <script src="/ui/js/navbar.js"></script>
  <script src="/ui/js/mainContent.js"></script>
  <script>
    document.addEventListener('DOMContentLoaded', () => {
      const navbarContainer = document.getElementById('navbar-container');
      const activeNav = navbarContainer.getAttribute('data-active-nav');
      renderNavbar('navbar-container', activeNav);
      renderMainContent('main-content-container', 'tool-display-area');
    });
  </script>
</body>
</html>
54
internal/server/web.go
Normal file
@@ -0,0 +1,54 @@
package server

import (
    "bytes"
    "embed"
    "fmt"
    "io"
    "io/fs"
    "net/http"

    "github.com/go-chi/chi/v5"
    "github.com/go-chi/chi/v5/middleware"
)

//go:embed all:static
var staticContent embed.FS

// webRouter creates a router that represents the routes under /ui
func webRouter() (chi.Router, error) {
    r := chi.NewRouter()
    r.Use(middleware.StripSlashes)

    // direct routes for html pages to provide clean URLs
    r.Get("/", func(w http.ResponseWriter, r *http.Request) { serveHTML(w, r, "static/index.html") })
    r.Get("/tools", func(w http.ResponseWriter, r *http.Request) { serveHTML(w, r, "static/tools.html") })
    r.Get("/toolsets", func(w http.ResponseWriter, r *http.Request) { serveHTML(w, r, "static/toolsets.html") })

    // handler for all other static files/assets
    staticFS, _ := fs.Sub(staticContent, "static")
    r.Handle("/*", http.StripPrefix("/ui", http.FileServer(http.FS(staticFS))))

    return r, nil
}

func serveHTML(w http.ResponseWriter, r *http.Request, filepath string) {
    file, err := staticContent.Open(filepath)
    if err != nil {
        http.Error(w, "File not found", http.StatusNotFound)
        return
    }
    defer file.Close()

    fileBytes, err := io.ReadAll(file)
    if err != nil {
        http.Error(w, fmt.Sprintf("Error reading file: %v", err), http.StatusInternalServerError)
        return
    }

    fileInfo, err := file.Stat()
    if err != nil {
        return
    }
    http.ServeContent(w, r, fileInfo.Name(), fileInfo.ModTime(), bytes.NewReader(fileBytes))
}
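A minimal sketch of how this router might be wired into the running server, mirroring the mounting pattern used in web_test.go below; the helper name and listen address are illustrative assumptions, not part of this change:

// Illustrative only: mount the /ui router on a parent chi router and serve it.
func serveUI(addr string) error {
    r := chi.NewRouter()
    webR, err := webRouter()
    if err != nil {
        return fmt.Errorf("failed to create web router: %w", err)
    }
    r.Mount("/ui", webR)
    return http.ListenAndServe(addr, r) // addr, e.g. ":8080", is an assumption
}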
179
internal/server/web_test.go
Normal file
@@ -0,0 +1,179 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-goquery/goquery"
|
||||
)
|
||||
|
||||
// TestWebEndpoint tests the routes defined in webRouter mounted under /ui.
|
||||
func TestWebEndpoint(t *testing.T) {
|
||||
mainRouter := chi.NewRouter()
|
||||
webR, err := webRouter()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create webRouter: %v", err)
|
||||
}
|
||||
mainRouter.Mount("/ui", webR)
|
||||
|
||||
ts := httptest.NewServer(mainRouter)
|
||||
defer ts.Close()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
path string
|
||||
wantStatus int
|
||||
wantContentType string
|
||||
wantPageTitle string
|
||||
}{
|
||||
{
|
||||
name: "web index page",
|
||||
path: "/ui",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Toolbox UI",
|
||||
},
|
||||
{
|
||||
name: "web index page with trailing slash",
|
||||
path: "/ui/",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Toolbox UI",
|
||||
},
|
||||
{
|
||||
name: "web tools page",
|
||||
path: "/ui/tools",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Tools View",
|
||||
},
|
||||
{
|
||||
name: "web tools page with trailing slash",
|
||||
path: "/ui/tools/",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Tools View",
|
||||
},
|
||||
{
|
||||
name: "web toolsets page",
|
||||
path: "/ui/toolsets",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Toolsets View",
|
||||
},
|
||||
{
|
||||
name: "web toolsets page with trailing slash",
|
||||
path: "/ui/toolsets/",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: "text/html",
|
||||
wantPageTitle: "Toolsets View",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
reqURL := ts.URL + tc.path
|
||||
req, err := http.NewRequest(http.MethodGet, reqURL, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create request: %v", err)
|
||||
}
|
||||
|
||||
client := ts.Client()
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to send request: %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != tc.wantStatus {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
t.Fatalf("Unexpected status code for %s: got %d, want %d, body: %s", tc.path, resp.StatusCode, tc.wantStatus, string(body))
|
||||
}
|
||||
|
||||
contentType := resp.Header.Get("Content-Type")
|
||||
if !strings.HasPrefix(contentType, tc.wantContentType) {
|
||||
t.Errorf("Unexpected Content-Type header for %s: got %s, want prefix %s", tc.path, contentType, tc.wantContentType)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read response body: %v", err)
|
||||
}
|
||||
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse HTML: %v", err)
|
||||
}
|
||||
|
||||
gotPageTitle := doc.Find("title").Text()
|
||||
if gotPageTitle != tc.wantPageTitle {
|
||||
t.Errorf("Unexpected page title for %s: got %q, want %q", tc.path, gotPageTitle, tc.wantPageTitle)
|
||||
}
|
||||
|
||||
pageURL := resp.Request.URL
|
||||
verifyLinkedResources(t, ts, pageURL, doc)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// verifyLinkedResources checks that resources linked in the HTML are served correctly.
|
||||
func verifyLinkedResources(t *testing.T, ts *httptest.Server, pageURL *url.URL, doc *goquery.Document) {
|
||||
t.Helper()
|
||||
|
||||
selectors := map[string]string{
|
||||
"stylesheet": "link[rel=stylesheet]",
|
||||
"script": "script[src]",
|
||||
}
|
||||
|
||||
attrMap := map[string]string{
|
||||
"stylesheet": "href",
|
||||
"script": "src",
|
||||
}
|
||||
|
||||
foundResource := false
|
||||
for resourceType, selector := range selectors {
|
||||
doc.Find(selector).Each(func(i int, s *goquery.Selection) {
|
||||
foundResource = true
|
||||
attrName := attrMap[resourceType]
|
||||
resourcePath, exists := s.Attr(attrName)
|
||||
if !exists || resourcePath == "" {
|
||||
t.Errorf("Resource element %s is missing attribute %s on page %s", selector, attrName, pageURL.String())
|
||||
return
|
||||
}
|
||||
|
||||
// Resolve the URL relative to the page URL
|
||||
resURL, err := url.Parse(resourcePath)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to parse resource path %q on page %s: %v", resourcePath, pageURL.String(), err)
|
||||
return
|
||||
}
|
||||
absoluteResourceURL := pageURL.ResolveReference(resURL)
|
||||
|
||||
// Skip external hosts
|
||||
if absoluteResourceURL.Host != pageURL.Host {
|
||||
t.Logf("Skipping resource on different host: %s", absoluteResourceURL.String())
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := ts.Client().Get(absoluteResourceURL.String())
|
||||
if err != nil {
|
||||
t.Errorf("Failed to GET %s resource %s: %v", resourceType, absoluteResourceURL.String(), err)
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
t.Errorf("Resource %s %s: expected status OK (200), but got %d", resourceType, absoluteResourceURL.String(), resp.StatusCode)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if !foundResource {
|
||||
t.Logf("No stylesheet or script resources found to check on page %s", pageURL.String())
|
||||
}
|
||||
}
|
||||
122
internal/sources/kuzu/kuzu.go
Normal file
@@ -0,0 +1,122 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package kuzu

import (
    "context"
    "fmt"

    "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/kuzudb/go-kuzu"
    "go.opentelemetry.io/otel/trace"
)

var SourceKind string = "kuzu"

func init() {
    if !sources.Register(SourceKind, newConfig) {
        panic(fmt.Sprintf("source kind %q already registered", SourceKind))
    }
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
    actual := Config{Name: name}
    if err := decoder.DecodeContext(ctx, &actual); err != nil {
        return nil, err
    }
    return actual, nil
}

type Config struct {
    Name              string `yaml:"name" validate:"required"`
    Kind              string `yaml:"kind" validate:"required"`
    Database          string `yaml:"database" default:":memory:"`
    BufferPoolSize    uint64 `yaml:"bufferPoolSize"`
    MaxNumThreads     uint64 `yaml:"maxNumThreads"`
    EnableCompression bool   `yaml:"enableCompression"`
    ReadOnly          bool   `yaml:"readOnly"`
    MaxDbSize         uint64 `yaml:"maxDbSize"`
}

func (cfg Config) SourceConfigKind() string {
    return SourceKind
}

func (cfg Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
    conn, err := initKuzuConnection(ctx, tracer, cfg)
    if err != nil {
        return nil, fmt.Errorf("unable to open a database connection: %w", err)
    }

    source := &Source{
        Name:       cfg.Name,
        Kind:       SourceKind,
        Connection: conn,
    }
    return source, nil
}

var _ sources.SourceConfig = Config{}

type Source struct {
    Name       string `yaml:"name"`
    Kind       string `yaml:"kind"`
    Connection *kuzu.Connection
}

// SourceKind implements sources.Source.
func (s *Source) SourceKind() string {
    return SourceKind
}

func (s *Source) KuzuDB() *kuzu.Connection {
    return s.Connection
}

var _ sources.Source = &Source{}

func initKuzuConnection(ctx context.Context, tracer trace.Tracer, config Config) (*kuzu.Connection, error) {
    //nolint:all // Reassigned ctx
    ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, config.Name)
    defer span.End()
    systemConfig := kuzu.DefaultSystemConfig()
    if config.BufferPoolSize != 0 {
        systemConfig.BufferPoolSize = config.BufferPoolSize
    }
    if config.EnableCompression {
        systemConfig.EnableCompression = config.EnableCompression
    }
    if config.MaxDbSize != 0 {
        systemConfig.MaxDbSize = config.MaxDbSize
    }
    if config.ReadOnly {
        systemConfig.ReadOnly = config.ReadOnly
    }
    if config.MaxNumThreads != 0 {
        systemConfig.MaxNumThreads = config.MaxNumThreads
    }

    db, err := kuzu.OpenDatabase(config.Database, systemConfig)
    if err != nil {
        return nil, fmt.Errorf("unable to connect to database: %w", err)
    }

    conn, err := kuzu.OpenConnection(db)
    if err != nil {
        return nil, fmt.Errorf("unable to open a database connection: %w", err)
    }
    return conn, nil
}
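An illustrative sketch of initializing this source programmatically, using the Config fields above and values matching the test that follows; the global OpenTelemetry tracer and the surrounding imports (context, log, go.opentelemetry.io/otel, plus this kuzu source package) are assumptions for the example:

// Illustrative only: build a Config and initialize it the way the server would.
cfg := kuzu.Config{
    Name:     "my-kuzu-db",
    Kind:     kuzu.SourceKind,
    Database: "/path/to/database.db",
    ReadOnly: true,
}
src, err := cfg.Initialize(context.Background(), otel.Tracer("toolbox"))
if err != nil {
    log.Fatalf("failed to initialize kuzu source: %v", err)
}
conn := src.(*kuzu.Source).KuzuDB() // *kuzu.Connection from go-kuzu
_ = conn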
@@ -12,22 +12,20 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package duckdb_test
|
||||
package kuzu_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/duckdb"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/kuzu"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
)
|
||||
|
||||
func TestParserFromYamlDuckDb(t *testing.T) {
|
||||
config := make(map[string]string)
|
||||
config["access_mode"] = "READ_ONLY"
|
||||
config["threads"] = "4"
|
||||
func TestParseFromYamlKuzu(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
@@ -36,32 +34,39 @@ func TestParserFromYamlDuckDb(t *testing.T) {
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
sources:
|
||||
my-duckdb:
|
||||
kind: duckdb
|
||||
`,
|
||||
sources:
|
||||
my-kuzu-db:
|
||||
kind: kuzu
|
||||
database: /path/to/database.db
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-duckdb": duckdb.Config{
|
||||
Name: "my-duckdb",
|
||||
Kind: duckdb.SourceKind,
|
||||
"my-kuzu-db": kuzu.Config{
|
||||
Name: "my-kuzu-db",
|
||||
Kind: kuzu.SourceKind,
|
||||
Database: "/path/to/database.db",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with custom configuration",
|
||||
desc: "with configuration",
|
||||
in: `
|
||||
sources:
|
||||
my-duckdb:
|
||||
kind: duckdb
|
||||
configuration:
|
||||
access_mode: READ_ONLY
|
||||
threads: 4
|
||||
`,
|
||||
my-kuzu-db:
|
||||
kind: kuzu
|
||||
database: /path/to/database.db
|
||||
maxNumThreads: 10
|
||||
readOnly: true
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-duckdb": duckdb.Config{
|
||||
Name: "my-duckdb",
|
||||
Kind: duckdb.SourceKind,
|
||||
Configuration: config,
|
||||
"my-kuzu-db": kuzu.Config{
|
||||
Name: "my-kuzu-db",
|
||||
Kind: kuzu.SourceKind,
|
||||
Database: "/path/to/database.db",
|
||||
MaxNumThreads: 10,
|
||||
ReadOnly: true,
|
||||
MaxDbSize: 0,
|
||||
BufferPoolSize: 0,
|
||||
EnableCompression: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -12,22 +12,25 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package duckdb
|
||||
package tidb
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strings"
|
||||
"regexp"
|
||||
|
||||
_ "github.com/go-sql-driver/mysql"
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
_ "github.com/marcboeker/go-duckdb/v2"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
const SourceKind string = "duckdb"
|
||||
const SourceKind string = "tidb"
|
||||
const TiDBCloudHostPattern string = `gateway\d{2}\.(.+)\.(prod|dev|staging)\.(.+)\.tidbcloud\.com`
|
||||
|
||||
// validate interface
|
||||
var _ sources.SourceConfig = Config{}
|
||||
|
||||
func init() {
|
||||
if !sources.Register(SourceKind, newConfig) {
|
||||
@@ -40,32 +43,24 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If the host is a TiDB Cloud instance, force to use SSL
|
||||
if IsTiDBCloudHost(actual.Host) {
|
||||
actual.UseSSL = true
|
||||
}
|
||||
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type Source struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Db *sql.DB
|
||||
}
|
||||
|
||||
// SourceKind implements sources.Source.
|
||||
func (s *Source) SourceKind() string {
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (s *Source) DuckDb() *sql.DB {
|
||||
return s.Db
|
||||
}
|
||||
|
||||
// validate Source
|
||||
var _ sources.Source = &Source{}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
DatabaseFile string `yaml:"dbFilePath,omitempty"`
|
||||
Configuration map[string]string `yaml:"configuration,omitempty"`
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user" validate:"required"`
|
||||
Password string `yaml:"password" validate:"required"`
|
||||
Database string `yaml:"database" validate:"required"`
|
||||
UseSSL bool `yaml:"ssl"`
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
@@ -73,56 +68,61 @@ func (r Config) SourceConfigKind() string {
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
db, err := initDuckDbConnection(ctx, tracer, r.Name, r.DatabaseFile, r.Configuration)
|
||||
pool, err := initTiDBConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.UseSSL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create db connection: %w", err)
|
||||
return nil, fmt.Errorf("unable to create pool: %w", err)
|
||||
}
|
||||
|
||||
err = db.PingContext(context.Background())
|
||||
err = pool.PingContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to connect sucessfully: %w", err)
|
||||
return nil, fmt.Errorf("unable to connect successfully: %w", err)
|
||||
}
|
||||
|
||||
s := &Source{
|
||||
Name: r.Name,
|
||||
Kind: r.Kind,
|
||||
Db: db,
|
||||
Kind: SourceKind,
|
||||
Pool: pool,
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ sources.SourceConfig = Config{}
|
||||
var _ sources.Source = &Source{}
|
||||
|
||||
func initDuckDbConnection(ctx context.Context, tracer trace.Tracer, name string, dbFilePath string, duckdbConfiguration map[string]string) (*sql.DB, error) {
|
||||
type Source struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Pool *sql.DB
|
||||
}
|
||||
|
||||
func (s *Source) SourceKind() string {
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (s *Source) TiDBPool() *sql.DB {
|
||||
return s.Pool
|
||||
}
|
||||
|
||||
func IsTiDBCloudHost(host string) bool {
|
||||
pattern := `gateway\d{2}\.(.+)\.(prod|dev|staging)\.(.+)\.tidbcloud\.com`
|
||||
match, err := regexp.MatchString(pattern, host)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return match
|
||||
}
|
||||
|
||||
func initTiDBConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname string, useSSL bool) (*sql.DB, error) {
|
||||
//nolint:all // Reassigned ctx
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
var configStr string = getDuckDbConfiguration(dbFilePath, duckdbConfiguration)
|
||||
// Configure the driver to connect to the database
|
||||
dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true&charset=utf8mb4&tls=%t", user, pass, host, port, dbname, useSSL)
|
||||
|
||||
//Open database connection
|
||||
db, err := sql.Open("duckdb", configStr)
|
||||
// Interact with the driver directly as you normally would
|
||||
pool, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to open duckdb connection: %w", err)
|
||||
return nil, fmt.Errorf("sql.Open: %w", err)
|
||||
}
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func getDuckDbConfiguration(dbFilePath string, duckdbConfiguration map[string]string) string {
|
||||
if len(duckdbConfiguration) == 0 {
|
||||
return dbFilePath
|
||||
}
|
||||
|
||||
params := url.Values{}
|
||||
for key, value := range duckdbConfiguration {
|
||||
params.Set(key, value)
|
||||
}
|
||||
|
||||
var configStr strings.Builder
|
||||
configStr.WriteString(dbFilePath)
|
||||
configStr.WriteString("?")
|
||||
configStr.WriteString(params.Encode())
|
||||
|
||||
return configStr.String()
|
||||
return pool, nil
|
||||
}
|
||||
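A quick illustration of the host check above; the expected results mirror the test table that follows rather than a captured run:

fmt.Println(IsTiDBCloudHost("gateway01.us-west-2.prod.aws.tidbcloud.com")) // true: matches the TiDB Cloud gateway pattern
fmt.Println(IsTiDBCloudHost("gateway1.us-west-2.prod.aws.tidbcloud.com"))  // false: the gateway label needs two digits
fmt.Println(IsTiDBCloudHost("127.0.0.1"))                                  // false: plain IP, so newConfig leaves ssl as configured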
258
internal/sources/tidb/tidb_test.go
Normal file
@@ -0,0 +1,258 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package tidb_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
)
|
||||
|
||||
func TestParseFromYamlTiDB(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.SourceConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
sources:
|
||||
my-tidb-instance:
|
||||
kind: tidb
|
||||
host: 0.0.0.0
|
||||
port: my-port
|
||||
database: my_db
|
||||
user: my_user
|
||||
password: my_pass
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-tidb-instance": tidb.Config{
|
||||
Name: "my-tidb-instance",
|
||||
Kind: tidb.SourceKind,
|
||||
Host: "0.0.0.0",
|
||||
Port: "my-port",
|
||||
Database: "my_db",
|
||||
User: "my_user",
|
||||
Password: "my_pass",
|
||||
UseSSL: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with SSL enabled",
|
||||
in: `
|
||||
sources:
|
||||
my-tidb-cloud:
|
||||
kind: tidb
|
||||
host: gateway01.us-west-2.prod.aws.tidbcloud.com
|
||||
port: 4000
|
||||
database: test_db
|
||||
user: cloud_user
|
||||
password: cloud_pass
|
||||
ssl: true
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-tidb-cloud": tidb.Config{
|
||||
Name: "my-tidb-cloud",
|
||||
Kind: tidb.SourceKind,
|
||||
Host: "gateway01.us-west-2.prod.aws.tidbcloud.com",
|
||||
Port: "4000",
|
||||
Database: "test_db",
|
||||
User: "cloud_user",
|
||||
Password: "cloud_pass",
|
||||
UseSSL: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "Change SSL enabled due to TiDB Cloud host",
|
||||
in: `
|
||||
sources:
|
||||
my-tidb-cloud:
|
||||
kind: tidb
|
||||
host: gateway01.us-west-2.prod.aws.tidbcloud.com
|
||||
port: 4000
|
||||
database: test_db
|
||||
user: cloud_user
|
||||
password: cloud_pass
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-tidb-cloud": tidb.Config{
|
||||
Name: "my-tidb-cloud",
|
||||
Kind: tidb.SourceKind,
|
||||
Host: "gateway01.us-west-2.prod.aws.tidbcloud.com",
|
||||
Port: "4000",
|
||||
Database: "test_db",
|
||||
User: "cloud_user",
|
||||
Password: "cloud_pass",
|
||||
UseSSL: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if !cmp.Equal(tc.want, got.Sources) {
|
||||
t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestFailParseFromYaml(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
desc: "extra field",
|
||||
in: `
|
||||
sources:
|
||||
my-tidb-instance:
|
||||
kind: tidb
|
||||
host: 0.0.0.0
|
||||
port: my-port
|
||||
database: my_db
|
||||
user: my_user
|
||||
password: my_pass
|
||||
ssl: false
|
||||
foo: bar
|
||||
`,
|
||||
err: "unable to parse source \"my-tidb-instance\" as \"tidb\": [2:1] unknown field \"foo\"\n 1 | database: my_db\n> 2 | foo: bar\n ^\n 3 | host: 0.0.0.0\n 4 | kind: tidb\n 5 | password: my_pass\n 6 | ",
|
||||
},
|
||||
{
|
||||
desc: "missing required field",
|
||||
in: `
|
||||
sources:
|
||||
my-tidb-instance:
|
||||
kind: tidb
|
||||
port: my-port
|
||||
database: my_db
|
||||
user: my_user
|
||||
password: my_pass
|
||||
ssl: false
|
||||
`,
|
||||
err: "unable to parse source \"my-tidb-instance\" as \"tidb\": Key: 'Config.Host' Error:Field validation for 'Host' failed on the 'required' tag",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err == nil {
|
||||
t.Fatalf("expect parsing to fail")
|
||||
}
|
||||
errStr := err.Error()
|
||||
if errStr != tc.err {
|
||||
t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsTiDBCloudHost(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
host string
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
desc: "valid TiDB Cloud host - ap-southeast-1",
|
||||
host: "gateway01.ap-southeast-1.prod.aws.tidbcloud.com",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
desc: "invalid TiDB Cloud host - wrong domain",
|
||||
host: "gateway01.ap-southeast-1.prod.aws.tdbcloud.com",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "local IP address",
|
||||
host: "127.0.0.1",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "valid TiDB Cloud host - us-west-2",
|
||||
host: "gateway01.us-west-2.prod.aws.tidbcloud.com",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
desc: "valid TiDB Cloud host - dev environment",
|
||||
host: "gateway02.eu-west-1.dev.aws.tidbcloud.com",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
desc: "valid TiDB Cloud host - staging environment",
|
||||
host: "gateway03.us-east-1.staging.aws.tidbcloud.com",
|
||||
want: true,
|
||||
},
|
||||
{
|
||||
desc: "invalid - wrong gateway format",
|
||||
host: "gateway1.us-west-2.prod.aws.tidbcloud.com",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "invalid - missing environment",
|
||||
host: "gateway01.us-west-2.aws.tidbcloud.com",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "invalid - wrong subdomain",
|
||||
host: "gateway01.us-west-2.prod.aws.tidbcloud.org",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "invalid - localhost",
|
||||
host: "localhost",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "invalid - private IP",
|
||||
host: "192.168.1.1",
|
||||
want: false,
|
||||
},
|
||||
{
|
||||
desc: "invalid - empty string",
|
||||
host: "",
|
||||
want: false,
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := tidb.IsTiDBCloudHost(tc.host)
|
||||
if got != tc.want {
|
||||
t.Fatalf("isTiDBCloudHost(%q) = %v, want %v", tc.host, got, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -16,6 +16,7 @@ package bigqueryexecutesql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
bigqueryapi "cloud.google.com/go/bigquery"
|
||||
@@ -23,6 +24,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
bigqueryrestapi "google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/iterator"
|
||||
)
|
||||
@@ -82,7 +84,13 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
}
|
||||
|
||||
sqlParameter := tools.NewStringParameter("sql", "The sql to execute.")
|
||||
parameters := tools.Parameters{sqlParameter}
|
||||
dryRunParameter := tools.NewBooleanParameterWithDefault(
|
||||
"dry_run",
|
||||
false,
|
||||
"If set to true, the query will be validated and information about the execution "+
|
||||
"will be returned without running the query. Defaults to false.",
|
||||
)
|
||||
parameters := tools.Parameters{sqlParameter, dryRunParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -119,10 +127,14 @@ type Tool struct {
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
sliceParams := params.AsSlice()
|
||||
sql, ok := sliceParams[0].(string)
|
||||
paramsMap := params.AsMap()
|
||||
sql, ok := paramsMap["sql"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
|
||||
return nil, fmt.Errorf("unable to cast sql parameter %s", paramsMap["sql"])
|
||||
}
|
||||
dryRun, ok := paramsMap["dry_run"].(bool)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to cast dry_run parameter %s", paramsMap["dry_run"])
|
||||
}
|
||||
|
||||
dryRunJob, err := dryRunQuery(ctx, t.RestService, t.Client.Project(), t.Client.Location, sql)
|
||||
@@ -130,29 +142,29 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
return nil, fmt.Errorf("query validation failed during dry run: %w", err)
|
||||
}
|
||||
|
||||
if dryRun {
|
||||
if dryRunJob != nil {
|
||||
jobJSON, err := json.MarshalIndent(dryRunJob, "", " ")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal dry run job to JSON: %w", err)
|
||||
}
|
||||
return string(jobJSON), nil
|
||||
}
|
||||
// This case should not be reached, but as a fallback, we return a message.
|
||||
return "Dry run was requested, but no job information was returned.", nil
|
||||
}
|
||||
|
||||
statementType := dryRunJob.Statistics.Query.StatementType
|
||||
// JobStatistics.QueryStatistics.StatementType
|
||||
query := t.Client.Query(sql)
|
||||
query.Location = t.Client.Location
|
||||
|
||||
// This block handles Data Manipulation Language (DML) and Data Definition Language (DDL) statements.
|
||||
// These statements (e.g., INSERT, UPDATE, CREATE TABLE) do not return a row set.
|
||||
// Instead, we execute them as a job, wait for completion, and return a success
|
||||
// message, including the number of affected rows for DML operations.
|
||||
if statementType != "SELECT" {
|
||||
job, err := query.Run(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to start DML/DDL job: %w", err)
|
||||
}
|
||||
status, err := job.Wait(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to wait for DML/DDL job to complete: %w", err)
|
||||
}
|
||||
if err := status.Err(); err != nil {
|
||||
return nil, fmt.Errorf("DML/DDL job failed with error: %w", err)
|
||||
}
|
||||
return "Operation completed successfully.", nil
|
||||
// Log the query executed for debugging.
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting logger: %s", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "executing `%s` tool query: %s", kind, sql)
|
||||
|
||||
// This block handles SELECT statements, which return a row set.
|
||||
// We iterate through the results, convert each row into a map of
|
||||
@@ -177,10 +189,21 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
}
|
||||
out = append(out, vMap)
|
||||
}
|
||||
if out == nil {
|
||||
// If the query returned any rows, return them directly.
|
||||
if len(out) > 0 {
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// This handles the standard case for a SELECT query that successfully
|
||||
// executes but returns zero rows.
|
||||
if statementType == "SELECT" {
|
||||
return "The query returned 0 rows.", nil
|
||||
}
|
||||
return out, nil
|
||||
// This is the fallback for a successful query that doesn't return content.
|
||||
// In most cases, this will be for DML/DDL statements like INSERT, UPDATE, CREATE, etc.
|
||||
// However, it is also possible that this was a query that was expected to return rows
|
||||
// but returned none, a case that we cannot distinguish here.
|
||||
return "Query executed successfully and returned no content.", nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
|
||||
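A sketch of how the new dry_run flag might be exercised through the tool's public surface; here tool is assumed to be an initialized bigqueryexecutesql Tool, and the nil claims map and SELECT 1 statement are illustrative shortcuts:

// Illustrative only: validate a statement without running it.
params, err := tool.ParseParams(map[string]any{"sql": "SELECT 1", "dry_run": true}, nil)
if err != nil {
    return nil, err
}
out, err := tool.Invoke(ctx, params)
if err != nil {
    return nil, err
}
// With dry_run=true, out holds the dry-run job serialized as indented JSON
// rather than query rows.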
@@ -17,6 +17,7 @@ package bigquerysql
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
bigqueryapi "cloud.google.com/go/bigquery"
|
||||
@@ -24,6 +25,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
bigqueryrestapi "google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/iterator"
|
||||
)
|
||||
|
||||
@@ -45,6 +47,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryRestService() *bigqueryrestapi.Service
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -101,6 +104,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
Statement: cfg.Statement,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
@@ -117,15 +121,17 @@ type Tool struct {
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
TemplateParameters tools.Parameters `yaml:"templateParameters"`
|
||||
AllParams tools.Parameters `yaml:"allParams"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Statement string
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
namedArgs := make([]bigqueryapi.QueryParameter, 0, len(params))
|
||||
highLevelParams := make([]bigqueryapi.QueryParameter, 0, len(t.Parameters))
|
||||
lowLevelParams := make([]*bigqueryrestapi.QueryParameter, 0, len(t.Parameters))
|
||||
|
||||
paramsMap := params.AsMap()
|
||||
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
|
||||
if err != nil {
|
||||
@@ -136,14 +142,11 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
name := p.GetName()
|
||||
value := paramsMap[name]
|
||||
|
||||
// BigQuery's QueryParameter only accepts typed slices as input
|
||||
// This checks if the param is an array.
|
||||
// If yes, convert []any to typed slice (e.g []string, []int)
|
||||
switch arrayParam := p.(type) {
|
||||
case *tools.ArrayParameter:
|
||||
// This block for converting []any to typed slices is still necessary and correct.
|
||||
if arrayParam, ok := p.(*tools.ArrayParameter); ok {
|
||||
arrayParamValue, ok := value.([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to convert parameter `%s` to []any %w", name, err)
|
||||
return nil, fmt.Errorf("unable to convert parameter `%s` to []any", name)
|
||||
}
|
||||
itemType := arrayParam.GetItems().GetType()
|
||||
var err error
|
||||
@@ -153,22 +156,69 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
}
|
||||
}
|
||||
|
||||
if strings.Contains(t.Statement, "@"+name) {
|
||||
namedArgs = append(namedArgs, bigqueryapi.QueryParameter{
|
||||
Name: name,
|
||||
Value: value,
|
||||
})
|
||||
} else {
|
||||
namedArgs = append(namedArgs, bigqueryapi.QueryParameter{
|
||||
Value: value,
|
||||
})
|
||||
// Determine if the parameter is named or positional for the high-level client.
|
||||
var paramNameForHighLevel string
|
||||
if strings.Contains(newStatement, "@"+name) {
|
||||
paramNameForHighLevel = name
|
||||
}
|
||||
|
||||
// 1. Create the high-level parameter for the final query execution.
|
||||
highLevelParams = append(highLevelParams, bigqueryapi.QueryParameter{
|
||||
Name: paramNameForHighLevel,
|
||||
Value: value,
|
||||
})
|
||||
|
||||
// 2. Create the low-level parameter for the dry run, using the defined type from `p`.
|
||||
lowLevelParam := &bigqueryrestapi.QueryParameter{
|
||||
Name: paramNameForHighLevel,
|
||||
ParameterType: &bigqueryrestapi.QueryParameterType{},
|
||||
ParameterValue: &bigqueryrestapi.QueryParameterValue{},
|
||||
}
|
||||
|
||||
if arrayParam, ok := p.(*tools.ArrayParameter); ok {
|
||||
// Handle array types based on their defined item type.
|
||||
lowLevelParam.ParameterType.Type = "ARRAY"
|
||||
itemType, err := BQTypeStringFromToolType(arrayParam.GetItems().GetType())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lowLevelParam.ParameterType.ArrayType = &bigqueryrestapi.QueryParameterType{Type: itemType}
|
||||
|
||||
// Build the array values.
|
||||
sliceVal := reflect.ValueOf(value)
|
||||
arrayValues := make([]*bigqueryrestapi.QueryParameterValue, sliceVal.Len())
|
||||
for i := 0; i < sliceVal.Len(); i++ {
|
||||
arrayValues[i] = &bigqueryrestapi.QueryParameterValue{
|
||||
Value: fmt.Sprintf("%v", sliceVal.Index(i).Interface()),
|
||||
}
|
||||
}
|
||||
lowLevelParam.ParameterValue.ArrayValues = arrayValues
|
||||
} else {
|
||||
// Handle scalar types based on their defined type.
|
||||
bqType, err := BQTypeStringFromToolType(p.GetType())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lowLevelParam.ParameterType.Type = bqType
|
||||
lowLevelParam.ParameterValue.Value = fmt.Sprintf("%v", value)
|
||||
}
|
||||
lowLevelParams = append(lowLevelParams, lowLevelParam)
|
||||
}
|
||||
|
||||
query := t.Client.Query(newStatement)
|
||||
query.Parameters = namedArgs
|
||||
query.Parameters = highLevelParams
|
||||
query.Location = t.Client.Location
|
||||
|
||||
dryRunJob, err := dryRunQuery(ctx, t.RestService, t.Client.Project(), t.Client.Location, newStatement, lowLevelParams, query.ConnectionProperties)
|
||||
if err != nil {
|
||||
// This is a fallback check in case the switch logic was bypassed.
|
||||
return nil, fmt.Errorf("final query validation failed: %w", err)
|
||||
}
|
||||
statementType := dryRunJob.Statistics.Query.StatementType
|
||||
|
||||
// This block handles SELECT statements, which return a row set.
|
||||
// We iterate through the results, convert each row into a map of
|
||||
// column names to values, and return the collection of rows.
|
||||
it, err := query.Read(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to execute query: %w", err)
|
||||
@@ -177,7 +227,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
var out []any
|
||||
for {
|
||||
var row map[string]bigqueryapi.Value
|
||||
err := it.Next(&row)
|
||||
err = it.Next(&row)
|
||||
if err == iterator.Done {
|
||||
break
|
||||
}
|
||||
@@ -190,8 +240,21 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
}
|
||||
out = append(out, vMap)
|
||||
}
|
||||
// If the query returned any rows, return them directly.
|
||||
if len(out) > 0 {
|
||||
return out, nil
|
||||
}
|
||||
|
||||
return out, nil
|
||||
// This handles the standard case for a SELECT query that successfully
|
||||
// executes but returns zero rows.
|
||||
if statementType == "SELECT" {
|
||||
return "The query returned 0 rows.", nil
|
||||
}
|
||||
// This is the fallback for a successful query that doesn't return content.
|
||||
// In most cases, this will be for DML/DDL statements like INSERT, UPDATE, CREATE, etc.
|
||||
// However, it is also possible that this was a query that was expected to return rows
|
||||
// but returned none, a case that we cannot distinguish here.
|
||||
return "Query executed successfully and returned no content.", nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
@@ -209,3 +272,58 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
func BQTypeStringFromToolType(toolType string) (string, error) {
|
||||
switch toolType {
|
||||
case "string":
|
||||
return "STRING", nil
|
||||
case "integer":
|
||||
return "INT64", nil
|
||||
case "float":
|
||||
return "FLOAT64", nil
|
||||
case "boolean":
|
||||
return "BOOL", nil
|
||||
// Note: 'array' is handled separately as it has a nested item type.
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported tool parameter type for BigQuery: %s", toolType)
|
||||
}
|
||||
}
|
||||
|
||||
func dryRunQuery(
|
||||
ctx context.Context,
|
||||
restService *bigqueryrestapi.Service,
|
||||
projectID string,
|
||||
location string,
|
||||
sql string,
|
||||
params []*bigqueryrestapi.QueryParameter,
|
||||
connProps []*bigqueryapi.ConnectionProperty,
|
||||
) (*bigqueryrestapi.Job, error) {
|
||||
useLegacySql := false
|
||||
|
||||
restConnProps := make([]*bigqueryrestapi.ConnectionProperty, len(connProps))
|
||||
for i, prop := range connProps {
|
||||
restConnProps[i] = &bigqueryrestapi.ConnectionProperty{Key: prop.Key, Value: prop.Value}
|
||||
}
|
||||
|
||||
jobToInsert := &bigqueryrestapi.Job{
|
||||
JobReference: &bigqueryrestapi.JobReference{
|
||||
ProjectId: projectID,
|
||||
Location: location,
|
||||
},
|
||||
Configuration: &bigqueryrestapi.JobConfiguration{
|
||||
DryRun: true,
|
||||
Query: &bigqueryrestapi.JobConfigurationQuery{
|
||||
Query: sql,
|
||||
UseLegacySql: &useLegacySql,
|
||||
ConnectionProperties: restConnProps,
|
||||
QueryParameters: params,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
insertResponse, err := restService.Jobs.Insert(projectID, jobToInsert).Context(ctx).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to insert dry run job: %w", err)
|
||||
}
|
||||
return insertResponse, nil
|
||||
}
|
||||
|
||||
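For reference, the tool-to-BigQuery type mapping introduced above covers the scalar cases as follows; array item types are resolved through the same helper (illustrative loop, not part of the change):

for _, tt := range []string{"string", "integer", "float", "boolean"} {
    bqType, _ := BQTypeStringFromToolType(tt)
    fmt.Println(tt, "->", bqType) // string -> STRING, integer -> INT64, float -> FLOAT64, boolean -> BOOL
}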
@@ -0,0 +1,183 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package dataplexlookupentry
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
dataplexapi "cloud.google.com/go/dataplex/apiv1"
|
||||
dataplexpb "cloud.google.com/go/dataplex/apiv1/dataplexpb"
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
dataplexds "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
const kind string = "dataplex-lookup-entry"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
CatalogClient() *dataplexapi.CatalogClient
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &dataplexds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{dataplexds.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// Initialize the search configuration with the provided sources
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
viewDesc := `
|
||||
## Argument: view
|
||||
|
||||
**Type:** Integer
|
||||
|
||||
**Description:** Specifies the parts of the entry and its aspects to return.
|
||||
|
||||
**Possible Values:**
|
||||
|
||||
* 1 (BASIC): Returns entry without aspects.
|
||||
* 2 (FULL): Return all required aspects and the keys of non-required aspects. (Default)
|
||||
* 3 (CUSTOM): Return the entry and aspects requested in aspect_types field (at most 100 aspects). Always use this view when aspect_types is not empty.
|
||||
* 4 (ALL): Return the entry and both required and optional aspects (at most 100 aspects)
|
||||
`
|
||||
|
||||
name := tools.NewStringParameter("name", "The project to which the request should be attributed in the following form: projects/{project}/locations/{location}.")
|
||||
view := tools.NewIntParameterWithDefault("view", 2, viewDesc)
|
||||
aspectTypes := tools.NewArrayParameterWithDefault("aspectTypes", []any{}, "Limits the aspects returned to the provided aspect types. It only works when used together with CUSTOM view.", tools.NewStringParameter("aspectType", "The types of aspects to be included in the response in the format `projects/{project}/locations/{location}/aspectTypes/{aspectType}`."))
|
||||
entry := tools.NewStringParameter("entry", "The resource name of the Entry in the following form: projects/{project}/locations/{location}/entryGroups/{entryGroup}/entries/{entry}.")
|
||||
parameters := tools.Parameters{name, view, aspectTypes, entry}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
t := &Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
CatalogClient: s.CatalogClient(),
|
||||
manifest: tools.Manifest{
|
||||
Description: cfg.Description,
|
||||
Parameters: parameters.Manifest(),
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
type Tool struct {
|
||||
Name string
|
||||
Kind string
|
||||
Parameters tools.Parameters
|
||||
AuthRequired []string
|
||||
CatalogClient *dataplexapi.CatalogClient
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t *Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
func (t *Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
paramsMap := params.AsMap()
|
||||
viewMap := map[int]dataplexpb.EntryView{
|
||||
1: dataplexpb.EntryView_BASIC,
|
||||
2: dataplexpb.EntryView_FULL,
|
||||
3: dataplexpb.EntryView_CUSTOM,
|
||||
4: dataplexpb.EntryView_ALL,
|
||||
}
|
||||
name, _ := paramsMap["name"].(string)
|
||||
entry, _ := paramsMap["entry"].(string)
|
||||
view, _ := paramsMap["view"].(int)
|
||||
aspectTypeSlice, err := tools.ConvertAnySliceToTyped(paramsMap["aspectTypes"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert aspectTypes to array of strings: %s", err)
|
||||
}
|
||||
aspectTypes := aspectTypeSlice.([]string)
|
||||
|
||||
req := &dataplexpb.LookupEntryRequest{
|
||||
Name: name,
|
||||
View: viewMap[view],
|
||||
AspectTypes: aspectTypes,
|
||||
Entry: entry,
|
||||
}
|
||||
|
||||
result, err := t.CatalogClient.LookupEntry(ctx, req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (t *Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
// Parse parameters from the provided data
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
func (t *Tool) Manifest() tools.Manifest {
|
||||
// Returns the tool manifest
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
func (t *Tool) McpManifest() tools.McpManifest {
|
||||
// Returns the tool MCP manifest
|
||||
return t.mcpManifest
|
||||
}
|
||||
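A sketch of the lookup request Invoke assembles when view is 3 (CUSTOM) and aspect types are supplied; the project, location, entry group, and aspect type names are placeholders:

req := &dataplexpb.LookupEntryRequest{
    Name:        "projects/my-project/locations/us-central1",
    View:        dataplexpb.EntryView_CUSTOM,
    AspectTypes: []string{"projects/my-project/locations/us-central1/aspectTypes/my-aspect-type"},
    Entry:       "projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry",
}
entry, err := catalogClient.LookupEntry(ctx, req) // catalogClient is a *dataplexapi.CatalogClient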
@@ -0,0 +1,117 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package dataplexlookupentry_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
)
|
||||
|
||||
func TestParseFromYamlDataplexLookupEntry(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
example_tool:
|
||||
kind: dataplex-lookup-entry
|
||||
source: my-instance
|
||||
description: some description
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"example_tool": dataplexlookupentry.Config{
|
||||
Name: "example_tool",
|
||||
Kind: "dataplex-lookup-entry",
|
||||
Source: "my-instance",
|
||||
Description: "some description",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "advanced example",
|
||||
in: `
|
||||
tools:
|
||||
example_tool:
|
||||
kind: dataplex-lookup-entry
|
||||
source: my-instance
|
||||
description: some description
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: some name description
|
||||
- name: view
|
||||
type: string
|
||||
description: some view description
|
||||
- name: aspectTypes
|
||||
type: array
|
||||
description: some aspect types description
|
||||
default: []
|
||||
items:
|
||||
name: aspectType
|
||||
type: string
|
||||
description: some aspect type description
|
||||
- name: entry
|
||||
type: string
|
||||
description: some entry description
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"example_tool": dataplexlookupentry.Config{
|
||||
Name: "example_tool",
|
||||
Kind: "dataplex-lookup-entry",
|
||||
Source: "my-instance",
|
||||
Description: "some description",
|
||||
AuthRequired: []string{},
|
||||
Parameters: []tools.Parameter{
|
||||
tools.NewStringParameter("name", "some name description"),
|
||||
tools.NewStringParameter("view", "some view description"),
|
||||
tools.NewArrayParameterWithDefault("aspectTypes", []any{}, "some aspect types description", tools.NewStringParameter("aspectType", "some aspect type description")),
|
||||
tools.NewStringParameter("entry", "some entry description"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
@@ -80,12 +80,11 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
}

query := tools.NewStringParameter("query", "The query against which entries in scope should be matched.")
name := tools.NewStringParameterWithDefault("name", fmt.Sprintf("projects/%s/locations/global", s.ProjectID()), "The project to which the request should be attributed in the following form: projects/{project}/locations/global")
pageSize := tools.NewIntParameterWithDefault("pageSize", 5, "Number of results in the search page.")
pageToken := tools.NewStringParameterWithDefault("pageToken", "", "Page token received from a previous locations.searchEntries call. Provide this to retrieve the subsequent page.")
orderBy := tools.NewStringParameterWithDefault("orderBy", "relevance", "Specifies the ordering of results. Supported values are: relevance, last_modified_timestamp, last_modified_timestamp asc")
semanticSearch := tools.NewBooleanParameterWithDefault("semanticSearch", true, "Whether to use semantic search for the query. If true, the query will be processed using semantic search capabilities.")
parameters := tools.Parameters{query, name, pageSize, pageToken, orderBy, semanticSearch}
parameters := tools.Parameters{query, pageSize, pageToken, orderBy, semanticSearch}

mcpManifest := tools.McpManifest{
Name: cfg.Name,
@@ -93,7 +92,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
InputSchema: parameters.McpManifest(),
}

t := &SearchTool{
t := &Tool{
Name: cfg.Name,
Kind: kind,
Parameters: parameters,
@@ -110,7 +109,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
return t, nil
}

type SearchTool struct {
type Tool struct {
Name string
Kind string
Parameters tools.Parameters
@@ -121,14 +120,13 @@ type SearchTool struct {
mcpManifest tools.McpManifest
}

func (t *SearchTool) Authorized(verifiedAuthServices []string) bool {
func (t *Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t *SearchTool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
func (t *Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
paramsMap := params.AsMap()
query, _ := paramsMap["query"].(string)
name, _ := paramsMap["name"].(string)
pageSize, _ := paramsMap["pageSize"].(int32)
pageToken, _ := paramsMap["pageToken"].(string)
orderBy, _ := paramsMap["orderBy"].(string)
@@ -136,7 +134,7 @@ func (t *SearchTool) Invoke(ctx context.Context, params tools.ParamValues) (any,

req := &dataplexpb.SearchEntriesRequest{
Query: query,
Name: name,
Name: fmt.Sprintf("projects/%s/locations/global", t.ProjectID),
PageSize: pageSize,
PageToken: pageToken,
OrderBy: orderBy,
@@ -159,17 +157,17 @@ func (t *SearchTool) Invoke(ctx context.Context, params tools.ParamValues) (any,
return results, nil
}

func (t *SearchTool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
func (t *Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
// Parse parameters from the provided data
return tools.ParseParams(t.Parameters, data, claims)
}

func (t *SearchTool) Manifest() tools.Manifest {
func (t *Tool) Manifest() tools.Manifest {
// Returns the tool manifest
return t.manifest
}

func (t *SearchTool) McpManifest() tools.McpManifest {
func (t *Tool) McpManifest() tools.McpManifest {
// Returns the tool MCP manifest
return t.mcpManifest
}

@@ -12,20 +12,20 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package duckdbsql
|
||||
package kuzucypher
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/duckdb"
|
||||
kuzuSource "github.com/googleapis/genai-toolbox/internal/sources/kuzu"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/kuzudb/go-kuzu"
|
||||
)
|
||||
|
||||
const kind string = "duckdb-sql"
|
||||
var kind string = "kuzu-cypher"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
@@ -41,14 +41,6 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
DuckDb() *sql.DB
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &duckdb.Source{}
|
||||
var compatibleSources = [...]string{duckdb.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
@@ -60,12 +52,19 @@ type Config struct {
|
||||
TemplateParameters tools.Parameters `yaml:"templateParameters"`
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
KuzuDB() *kuzu.Connection
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &kuzuSource.Source{}
|
||||
var compatibleSources = [...]string{kuzuSource.SourceKind}
|
||||
|
||||
// Initialize implements tools.ToolConfig.
|
||||
func (c Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[c.Source]
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", c.Source)
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
@@ -73,123 +72,102 @@ func (c Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
allParameters, paramManifest, paramMcpManifest := tools.ProcessParameters(c.TemplateParameters, c.Parameters)
|
||||
|
||||
allParameters, paramManifest, paramMcpManifest := tools.ProcessParameters(cfg.TemplateParameters, cfg.Parameters)
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: c.Name,
|
||||
Description: c.Description,
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: paramMcpManifest,
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: c.Name,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: c.Parameters,
|
||||
TemplateParameters: c.TemplateParameters,
|
||||
Parameters: cfg.Parameters,
|
||||
TemplateParameters: cfg.TemplateParameters,
|
||||
AllParams: allParameters,
|
||||
Statement: c.Statement,
|
||||
AuthRequired: c.AuthRequired,
|
||||
Db: s.DuckDb(),
|
||||
manifest: tools.Manifest{Description: c.Description, Parameters: paramManifest, AuthRequired: c.AuthRequired},
|
||||
Statement: cfg.Statement,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Connection: s.KuzuDB(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// ToolConfigKind implements tools.ToolConfig.
|
||||
func (c Config) ToolConfigKind() string {
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
TemplateParameters tools.Parameters `yaml:"templateParameters"`
|
||||
AllParams tools.Parameters `yaml:"allParams"`
|
||||
|
||||
Db *sql.DB
|
||||
Connection *kuzu.Connection
|
||||
Statement string `yaml:"statement"`
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
// Authorized implements tools.Tool.
|
||||
func (t Tool) Authorized(verifiedAuthSources []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthSources)
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
// Invoke implements tools.Tool.
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
conn := t.Connection
|
||||
paramsMap := params.AsMap()
|
||||
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to extract template params %w", err)
|
||||
}
|
||||
|
||||
newParams, err := tools.GetParams(t.Parameters, paramsMap)
|
||||
preparedStatement, err := conn.Prepare(newStatement)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to generate prepared statement %w", err)
|
||||
}
|
||||
newParamMap, err := getParams(t.Parameters, paramsMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to extract standard params %w", err)
|
||||
}
|
||||
|
||||
sliceParams := newParams.AsSlice()
|
||||
// Execute the SQL query with parameters
|
||||
rows, err := t.Db.QueryContext(ctx, newStatement, sliceParams...)
|
||||
result, err := conn.Execute(preparedStatement, newParamMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to execute query: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Get column names
|
||||
cols, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get column names: %w", err)
|
||||
}
|
||||
|
||||
values := make([]any, len(cols))
|
||||
valuePtrs := make([]any, len(cols))
|
||||
for i := range values {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
// Prepare the result slice
|
||||
var result []any
|
||||
// Iterate through the rows
|
||||
for rows.Next() {
|
||||
// Scan the row into the value pointers
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
return nil, fmt.Errorf("unable to scan row: %w", err)
|
||||
defer result.Close()
|
||||
cols := result.GetColumnNames()
|
||||
var out []any
|
||||
for result.HasNext() {
|
||||
tuple, err := result.Next()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to parse row: %w", err)
|
||||
}
|
||||
defer tuple.Close()
|
||||
|
||||
// Create a map for this row
|
||||
// The result is a tuple, which can be converted to a slice.
|
||||
slice, err := tuple.GetAsSlice()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to slice row: %w", err)
|
||||
}
|
||||
rowMap := make(map[string]interface{})
|
||||
for i, col := range cols {
|
||||
val := values[i]
|
||||
// Handle nil values
|
||||
if val == nil {
|
||||
rowMap[col] = nil
|
||||
continue
|
||||
}
|
||||
val := slice[i]
|
||||
// Store the value in the map
|
||||
rowMap[col] = val
|
||||
}
|
||||
result = append(result, rowMap)
|
||||
out = append(out, rowMap)
|
||||
}
|
||||
|
||||
if err = rows.Close(); err != nil {
|
||||
return nil, fmt.Errorf("unable to close rows: %w", err)
|
||||
}
|
||||
|
||||
if err = rows.Err(); err != nil {
|
||||
return nil, fmt.Errorf("error iterating rows: %w", err)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// Manifest implements tools.Tool.
|
||||
@@ -208,3 +186,16 @@ func (t Tool) ParseParams(data map[string]any, claimsMap map[string]map[string]a
|
||||
}
|
||||
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
func getParams(params tools.Parameters, paramValuesMap map[string]interface{}) (map[string]interface{}, error) {
|
||||
newParamMap := make(map[string]any)
|
||||
for _, p := range params {
|
||||
k := p.GetName()
|
||||
v, ok := paramValuesMap[k]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("missing parameter %s", k)
|
||||
}
|
||||
newParamMap[k] = v
|
||||
}
|
||||
return newParamMap, nil
|
||||
}
|
||||
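A condensed sketch (not part of the diff) of the query path the new kuzu-cypher Tool.Invoke implements above: prepare the Cypher statement, bind the named parameters collected by getParams, then walk the result tuples into one map per row. Only the go-kuzu calls already used in the diff appear here; the standalone wrapper and error messages are illustrative.

```go
package main

import (
	"fmt"

	"github.com/kuzudb/go-kuzu"
)

// runCypher mirrors the prepared-statement flow shown in the diff.
func runCypher(conn *kuzu.Connection, statement string, params map[string]any) ([]any, error) {
	prepared, err := conn.Prepare(statement)
	if err != nil {
		return nil, fmt.Errorf("unable to generate prepared statement %w", err)
	}
	result, err := conn.Execute(prepared, params)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	defer result.Close()

	cols := result.GetColumnNames()
	var out []any
	for result.HasNext() {
		tuple, err := result.Next()
		if err != nil {
			return nil, fmt.Errorf("unable to parse row: %w", err)
		}
		// Each tuple converts to a slice aligned with the column names.
		slice, err := tuple.GetAsSlice()
		tuple.Close()
		if err != nil {
			return nil, fmt.Errorf("unable to slice row: %w", err)
		}
		row := make(map[string]any, len(cols))
		for i, col := range cols {
			row[col] = slice[i]
		}
		out = append(out, row)
	}
	return out, nil
}

func main() {
	// Usage sketch (connection setup omitted):
	// rows, err := runCypher(conn, "match (a:user {name:$name}) return a.*;",
	//     map[string]any{"name": "alice"})
	_ = runCypher
}
```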
94
internal/tools/kuzu/kuzucypher/kuzucypher_test.go
Normal file
@@ -0,0 +1,94 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package kuzucypher_test

import (
"testing"

"github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/internal/tools/kuzu/kuzucypher"
)

func TestParseFromYamlKuzu(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
tcs := []struct {
desc string
in string
want server.ToolConfigs
}{
{
desc: "basic example",
in: `
tools:
example_tool:
kind: kuzu-cypher
source: my-kuzu-instance
description: some description
statement: |
match (a:user {name:$name}) return a.*;
authRequired:
- my-google-auth-service
- other-auth-service
parameters:
- name: name
type: string
description: some description
authServices:
- name: my-google-auth-service
field: user_id
- name: other-auth-service
field: user_id
`,
want: server.ToolConfigs{
"example_tool": kuzucypher.Config{
Name: "example_tool",
Kind: "kuzu-cypher",
Source: "my-kuzu-instance",
Description: "some description",
Statement: "match (a:user {name:$name}) return a.*;\n",
AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
Parameters: []tools.Parameter{
tools.NewStringParameterWithAuth("name", "some description",
[]tools.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"},
{Name: "other-auth-service", Field: "user_id"}}),
},
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
// Parse contents
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
})
}

}
187
internal/tools/looker/lookercommon/lookercommon.go
Normal file
@@ -0,0 +1,187 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
package lookercommon
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
|
||||
"github.com/thlib/go-timezone-local/tzlocal"
|
||||
)
|
||||
|
||||
const (
|
||||
DimensionsFields = "fields(dimensions(name,type,label,label_short))"
|
||||
FiltersFields = "fields(filters(name,type,label,label_short))"
|
||||
MeasuresFields = "fields(measures(name,type,label,label_short))"
|
||||
ParametersFields = "fields(parameters(name,type,label,label_short))"
|
||||
)
|
||||
|
||||
// ExtractLookerFieldProperties extracts common properties from Looker field objects.
|
||||
func ExtractLookerFieldProperties(ctx context.Context, fields *[]v4.LookmlModelExploreField) ([]any, error) {
|
||||
var data []any
|
||||
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
// This should ideally not happen if the context is properly set up.
|
||||
// Log and return an empty map or handle as appropriate for your error strategy.
|
||||
return data, fmt.Errorf("error getting logger from context in ExtractLookerFieldProperties: %v", err)
|
||||
}
|
||||
|
||||
for _, v := range *fields {
|
||||
logger.DebugContext(ctx, "Got response element of %v\n", v)
|
||||
vMap := make(map[string]any)
|
||||
if v.Name != nil {
|
||||
vMap["name"] = *v.Name
|
||||
}
|
||||
if v.Type != nil {
|
||||
vMap["type"] = *v.Type
|
||||
}
|
||||
if v.Label != nil {
|
||||
vMap["label"] = *v.Label
|
||||
}
|
||||
if v.LabelShort != nil {
|
||||
vMap["label_short"] = *v.LabelShort
|
||||
}
|
||||
logger.DebugContext(ctx, "Converted to %v\n", vMap)
|
||||
data = append(data, vMap)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// CheckLookerExploreFields checks if the Fields object in LookmlModelExplore is nil before accessing its sub-fields.
|
||||
func CheckLookerExploreFields(resp *v4.LookmlModelExplore) error {
|
||||
if resp == nil || resp.Fields == nil {
|
||||
return fmt.Errorf("looker API response or its fields object is nil")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetFieldParameters() tools.Parameters {
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore containing the fields.")
|
||||
return tools.Parameters{modelParameter, exploreParameter}
|
||||
}
|
||||
|
||||
func GetQueryParameters() tools.Parameters {
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore to be queried.")
|
||||
fieldsParameter := tools.NewArrayParameter("fields",
|
||||
"The fields to be retrieved.",
|
||||
tools.NewStringParameter("field", "A field to be returned in the query"),
|
||||
)
|
||||
filtersParameter := tools.NewMapParameterWithDefault("filters",
|
||||
map[string]any{},
|
||||
"The filters for the query",
|
||||
"",
|
||||
)
|
||||
pivotsParameter := tools.NewArrayParameterWithDefault("pivots",
|
||||
[]any{},
|
||||
"The query pivots (must be included in fields as well).",
|
||||
tools.NewStringParameter("pivot_field", "A field to be used as a pivot in the query"),
|
||||
)
|
||||
sortsParameter := tools.NewArrayParameterWithDefault("sorts",
|
||||
[]any{},
|
||||
"The sorts like \"field.id desc 0\".",
|
||||
tools.NewStringParameter("sort_field", "A field to be used as a sort in the query"),
|
||||
)
|
||||
limitParameter := tools.NewIntParameterWithDefault("limit", 500, "The row limit.")
|
||||
tzParameter := tools.NewStringParameterWithRequired("tz", "The query timezone.", false)
|
||||
|
||||
return tools.Parameters{
|
||||
modelParameter,
|
||||
exploreParameter,
|
||||
fieldsParameter,
|
||||
filtersParameter,
|
||||
pivotsParameter,
|
||||
sortsParameter,
|
||||
limitParameter,
|
||||
tzParameter,
|
||||
}
|
||||
}
|
||||
|
||||
func ProcessFieldArgs(ctx context.Context, params tools.ParamValues) (*string, *string, error) {
|
||||
mapParams := params.AsMap()
|
||||
model, ok := mapParams["model"].(string)
|
||||
if !ok {
|
||||
return nil, nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
|
||||
}
|
||||
explore, ok := mapParams["explore"].(string)
|
||||
if !ok {
|
||||
return nil, nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
|
||||
}
|
||||
return &model, &explore, nil
|
||||
}
|
||||
|
||||
func ProcessQueryArgs(ctx context.Context, params tools.ParamValues) (*v4.WriteQuery, error) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
|
||||
logger.DebugContext(ctx, "params = ", params)
|
||||
paramsMap := params.AsMap()
|
||||
|
||||
f, err := tools.ConvertAnySliceToTyped(paramsMap["fields"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert fields to array of strings: %s", err)
|
||||
}
|
||||
fields := f.([]string)
|
||||
filters := paramsMap["filters"].(map[string]any)
|
||||
// Sometimes filters come as "'field.id'": "expression" so strip extra ''
|
||||
for k, v := range filters {
|
||||
if len(k) > 0 && k[0] == '\'' && k[len(k)-1] == '\'' {
|
||||
delete(filters, k)
|
||||
filters[k[1:len(k)-1]] = v
|
||||
}
|
||||
}
|
||||
p, err := tools.ConvertAnySliceToTyped(paramsMap["pivots"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert pivots to array of strings: %s", err)
|
||||
}
|
||||
pivots := p.([]string)
|
||||
s, err := tools.ConvertAnySliceToTyped(paramsMap["sorts"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert sorts to array of strings: %s", err)
|
||||
}
|
||||
sorts := s.([]string)
|
||||
limit := fmt.Sprintf("%v", paramsMap["limit"].(int))
|
||||
|
||||
var tz string
|
||||
if paramsMap["tz"] != nil {
|
||||
tz = paramsMap["tz"].(string)
|
||||
} else {
|
||||
tzname, err := tzlocal.RuntimeTZ()
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, fmt.Sprintf("Error getting local timezone: %s", err))
|
||||
tzname = "Etc/UTC"
|
||||
}
|
||||
tz = tzname
|
||||
}
|
||||
|
||||
wq := v4.WriteQuery{
|
||||
Model: paramsMap["model"].(string),
|
||||
View: paramsMap["explore"].(string),
|
||||
Fields: &fields,
|
||||
Pivots: &pivots,
|
||||
Filters: &filters,
|
||||
Sorts: &sorts,
|
||||
QueryTimezone: &tz,
|
||||
Limit: &limit,
|
||||
}
|
||||
return &wq, nil
|
||||
}
|
||||
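A minimal sketch (not from the diff) of how a Looker metadata tool can lean on the new lookercommon helpers added above. The runExplore callback stands in for t.Client.LookmlModelExplore(req, t.ApiSettings) and is an assumption made purely for illustration; everything else reuses functions and types shown in the new file.

```go
package main

import (
	"context"
	"fmt"

	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// getDimensions shows the shared path: parse model/explore, request only the
// dimension fields, validate the response, then extract the field properties.
func getDimensions(ctx context.Context, params tools.ParamValues,
	runExplore func(v4.RequestLookmlModelExplore) (v4.LookmlModelExplore, error)) ([]any, error) {
	model, explore, err := lookercommon.ProcessFieldArgs(ctx, params)
	if err != nil {
		return nil, fmt.Errorf("error processing model or explore: %w", err)
	}
	fields := lookercommon.DimensionsFields
	resp, err := runExplore(v4.RequestLookmlModelExplore{
		LookmlModelName: *model,
		ExploreName:     *explore,
		Fields:          &fields,
	})
	if err != nil {
		return nil, fmt.Errorf("error making get_dimensions request: %w", err)
	}
	if err := lookercommon.CheckLookerExploreFields(&resp); err != nil {
		return nil, fmt.Errorf("error processing get_dimensions response: %w", err)
	}
	return lookercommon.ExtractLookerFieldProperties(ctx, resp.Fields.Dimensions)
}

func main() {
	// The "model" and "explore" values come from lookercommon.GetFieldParameters()
	// and are parsed with tools.ParseParams before reaching this point.
	_ = getDimensions
}
```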
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
@@ -71,9 +72,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
|
||||
}
|
||||
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore containing the dimensions.")
|
||||
parameters := tools.Parameters{modelParameter, exploreParameter}
|
||||
parameters := lookercommon.GetFieldParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -117,45 +116,29 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
mapParams := params.AsMap()
|
||||
model, ok := mapParams["model"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
|
||||
}
|
||||
explore, ok := mapParams["explore"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
|
||||
model, explore, err := lookercommon.ProcessFieldArgs(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error processing model or explore: %w", err)
|
||||
}
|
||||
|
||||
fields := "fields(dimensions(name,type,label,label_short))"
|
||||
fields := lookercommon.DimensionsFields
|
||||
req := v4.RequestLookmlModelExplore{
|
||||
LookmlModelName: model,
|
||||
ExploreName: explore,
|
||||
LookmlModelName: *model,
|
||||
ExploreName: *explore,
|
||||
Fields: &fields,
|
||||
}
|
||||
resp, err := t.Client.LookmlModelExplore(req, t.ApiSettings)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error making get_dimensions request: %s", err)
|
||||
return nil, fmt.Errorf("error making get_dimensions request: %w", err)
|
||||
}
|
||||
|
||||
var data []any
|
||||
for _, v := range *resp.Fields.Dimensions {
|
||||
logger.DebugContext(ctx, "Got response element of %v\n", v)
|
||||
vMap := make(map[string]any)
|
||||
if v.Name != nil {
|
||||
vMap["name"] = *v.Name
|
||||
}
|
||||
if v.Type != nil {
|
||||
vMap["type"] = *v.Type
|
||||
}
|
||||
if v.Label != nil {
|
||||
vMap["label"] = *v.Label
|
||||
}
|
||||
if v.LabelShort != nil {
|
||||
vMap["label_short"] = *v.LabelShort
|
||||
}
|
||||
logger.DebugContext(ctx, "Converted to %v\n", vMap)
|
||||
data = append(data, vMap)
|
||||
if err := lookercommon.CheckLookerExploreFields(&resp); err != nil {
|
||||
return nil, fmt.Errorf("error processing get_dimensions response: %w", err)
|
||||
}
|
||||
|
||||
data, err := lookercommon.ExtractLookerFieldProperties(ctx, resp.Fields.Dimensions)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error extracting get_dimensions response: %w", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "data = ", data)
|
||||
|
||||
@@ -175,5 +158,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -161,5 +161,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
@@ -71,9 +72,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
|
||||
}
|
||||
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore containing the filters.")
|
||||
parameters := tools.Parameters{modelParameter, exploreParameter}
|
||||
parameters := lookercommon.GetFieldParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -117,45 +116,29 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
mapParams := params.AsMap()
|
||||
model, ok := mapParams["model"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
|
||||
}
|
||||
explore, ok := mapParams["explore"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
|
||||
model, explore, err := lookercommon.ProcessFieldArgs(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error processing model or explore: %w", err)
|
||||
}
|
||||
|
||||
fields := "fields(filters(name,type,label,label_short))"
|
||||
fields := lookercommon.FiltersFields
|
||||
req := v4.RequestLookmlModelExplore{
|
||||
LookmlModelName: model,
|
||||
ExploreName: explore,
|
||||
LookmlModelName: *model,
|
||||
ExploreName: *explore,
|
||||
Fields: &fields,
|
||||
}
|
||||
resp, err := t.Client.LookmlModelExplore(req, t.ApiSettings)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error making get_filters request: %s", err)
|
||||
return nil, fmt.Errorf("error making get_filters request: %w", err)
|
||||
}
|
||||
|
||||
var data []any
|
||||
for _, v := range *resp.Fields.Filters {
|
||||
logger.DebugContext(ctx, "Got response element of %v\n", v)
|
||||
vMap := make(map[string]any)
|
||||
if v.Name != nil {
|
||||
vMap["name"] = *v.Name
|
||||
}
|
||||
if v.Type != nil {
|
||||
vMap["type"] = *v.Type
|
||||
}
|
||||
if v.Label != nil {
|
||||
vMap["label"] = *v.Label
|
||||
}
|
||||
if v.LabelShort != nil {
|
||||
vMap["label_short"] = *v.LabelShort
|
||||
}
|
||||
logger.DebugContext(ctx, "Converted to %v\n", vMap)
|
||||
data = append(data, vMap)
|
||||
if err := lookercommon.CheckLookerExploreFields(&resp); err != nil {
|
||||
return nil, fmt.Errorf("error processing get_filters response: %w", err)
|
||||
}
|
||||
|
||||
data, err := lookercommon.ExtractLookerFieldProperties(ctx, resp.Fields.Filters)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error extracting get_filters response: %w", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "data = ", data)
|
||||
|
||||
@@ -175,5 +158,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -184,5 +184,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
@@ -71,9 +72,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
|
||||
}
|
||||
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore containing the measures.")
|
||||
parameters := tools.Parameters{modelParameter, exploreParameter}
|
||||
parameters := lookercommon.GetFieldParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -117,45 +116,29 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
mapParams := params.AsMap()
|
||||
model, ok := mapParams["model"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
|
||||
}
|
||||
explore, ok := mapParams["explore"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
|
||||
model, explore, err := lookercommon.ProcessFieldArgs(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error processing model or explore: %w", err)
|
||||
}
|
||||
|
||||
fields := "fields(measures(name,type,label,label_short))"
|
||||
fields := lookercommon.MeasuresFields
|
||||
req := v4.RequestLookmlModelExplore{
|
||||
LookmlModelName: model,
|
||||
ExploreName: explore,
|
||||
LookmlModelName: *model,
|
||||
ExploreName: *explore,
|
||||
Fields: &fields,
|
||||
}
|
||||
resp, err := t.Client.LookmlModelExplore(req, t.ApiSettings)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error making get_measures request: %s", err)
|
||||
return nil, fmt.Errorf("error making get_measures request: %w", err)
|
||||
}
|
||||
|
||||
var data []any
|
||||
for _, v := range *resp.Fields.Measures {
|
||||
logger.DebugContext(ctx, "Got response element of %v\n", v)
|
||||
vMap := make(map[string]any)
|
||||
if v.Name != nil {
|
||||
vMap["name"] = *v.Name
|
||||
}
|
||||
if v.Type != nil {
|
||||
vMap["type"] = *v.Type
|
||||
}
|
||||
if v.Label != nil {
|
||||
vMap["label"] = *v.Label
|
||||
}
|
||||
if v.LabelShort != nil {
|
||||
vMap["label_short"] = *v.LabelShort
|
||||
}
|
||||
logger.DebugContext(ctx, "Converted to %v\n", vMap)
|
||||
data = append(data, vMap)
|
||||
if err := lookercommon.CheckLookerExploreFields(&resp); err != nil {
|
||||
return nil, fmt.Errorf("error processing get_measures response: %w", err)
|
||||
}
|
||||
|
||||
data, err := lookercommon.ExtractLookerFieldProperties(ctx, resp.Fields.Measures)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error extracting get_measures response: %w", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "data = ", data)
|
||||
|
||||
@@ -175,5 +158,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -158,5 +158,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
@@ -71,9 +72,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
|
||||
}
|
||||
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore containing the parameters.")
|
||||
parameters := tools.Parameters{modelParameter, exploreParameter}
|
||||
parameters := lookercommon.GetFieldParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -117,45 +116,29 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
mapParams := params.AsMap()
|
||||
model, ok := mapParams["model"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
|
||||
}
|
||||
explore, ok := mapParams["explore"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
|
||||
model, explore, err := lookercommon.ProcessFieldArgs(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error processing model or explore: %w", err)
|
||||
}
|
||||
|
||||
fields := "fields(parameters(name,type,label,label_short))"
|
||||
fields := lookercommon.ParametersFields
|
||||
req := v4.RequestLookmlModelExplore{
|
||||
LookmlModelName: model,
|
||||
ExploreName: explore,
|
||||
LookmlModelName: *model,
|
||||
ExploreName: *explore,
|
||||
Fields: &fields,
|
||||
}
|
||||
resp, err := t.Client.LookmlModelExplore(req, t.ApiSettings)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error making get_parameters request: %s", err)
|
||||
return nil, fmt.Errorf("error making get_parameters request: %w", err)
|
||||
}
|
||||
|
||||
var data []any
|
||||
for _, v := range *resp.Fields.Parameters {
|
||||
logger.DebugContext(ctx, "Got response element of %v\n", v)
|
||||
vMap := make(map[string]any)
|
||||
if v.Name != nil {
|
||||
vMap["name"] = *v.Name
|
||||
}
|
||||
if v.Type != nil {
|
||||
vMap["type"] = *v.Type
|
||||
}
|
||||
if v.Label != nil {
|
||||
vMap["label"] = *v.Label
|
||||
}
|
||||
if v.LabelShort != nil {
|
||||
vMap["label_short"] = *v.LabelShort
|
||||
}
|
||||
logger.DebugContext(ctx, "Converted to %v\n", vMap)
|
||||
data = append(data, vMap)
|
||||
if err := lookercommon.CheckLookerExploreFields(&resp); err != nil {
|
||||
return nil, fmt.Errorf("error processing get_parameters response: %w", err)
|
||||
}
|
||||
|
||||
data, err := lookercommon.ExtractLookerFieldProperties(ctx, resp.Fields.Parameters)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error extracting get_parameters response: %w", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "data = ", data)
|
||||
|
||||
@@ -175,5 +158,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
@@ -22,12 +22,11 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
|
||||
|
||||
"github.com/thlib/go-timezone-local/tzlocal"
|
||||
)
|
||||
|
||||
const kind string = "looker-query"
|
||||
@@ -74,40 +73,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
|
||||
}
|
||||
|
||||
modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
|
||||
exploreParameter := tools.NewStringParameter("explore", "The explore to be queried.")
|
||||
fieldsParameter := tools.NewArrayParameter("fields",
|
||||
"The fields to be retrieved.",
|
||||
tools.NewStringParameter("field", "A field to be returned in the query"),
|
||||
)
|
||||
filtersParameter := tools.NewMapParameterWithDefault("filters",
|
||||
map[string]any{},
|
||||
"The filters for the query",
|
||||
"",
|
||||
)
|
||||
pivotsParameter := tools.NewArrayParameterWithDefault("pivots",
|
||||
[]any{},
|
||||
"The query pivots (must be included in fields as well).",
|
||||
tools.NewStringParameter("pivot_field", "A field to be used as a pivot in the query"),
|
||||
)
|
||||
sortsParameter := tools.NewArrayParameterWithDefault("sorts",
|
||||
[]any{},
|
||||
"The sorts like \"field.id desc 0\".",
|
||||
tools.NewStringParameter("sort_field", "A field to be used as a sort in the query"),
|
||||
)
|
||||
limitParameter := tools.NewIntParameterWithDefault("limit", 500, "The row limit.")
|
||||
tzParameter := tools.NewStringParameterWithRequired("tz", "The query timezone.", false)
|
||||
|
||||
parameters := tools.Parameters{
|
||||
modelParameter,
|
||||
exploreParameter,
|
||||
fieldsParameter,
|
||||
filtersParameter,
|
||||
pivotsParameter,
|
||||
sortsParameter,
|
||||
limitParameter,
|
||||
tzParameter,
|
||||
}
|
||||
parameters := lookercommon.GetQueryParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -151,59 +117,13 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
logger.DebugContext(ctx, "params = ", params)
|
||||
paramsMap := params.AsMap()
|
||||
|
||||
f, err := tools.ConvertAnySliceToTyped(paramsMap["fields"].([]any), "string")
|
||||
wq, err := lookercommon.ProcessQueryArgs(ctx, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert fields to array of strings: %s", err)
|
||||
}
|
||||
fields := f.([]string)
|
||||
filters := paramsMap["filters"].(map[string]any)
|
||||
// Sometimes filters come as "'field.id'": "expression" so strip extra ''
|
||||
for k, v := range filters {
|
||||
if len(k) > 0 && k[0] == '\'' && k[len(k)-1] == '\'' {
|
||||
delete(filters, k)
|
||||
filters[k[1:len(k)-1]] = v
|
||||
}
|
||||
}
|
||||
p, err := tools.ConvertAnySliceToTyped(paramsMap["pivots"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert pivots to array of strings: %s", err)
|
||||
}
|
||||
pivots := p.([]string)
|
||||
s, err := tools.ConvertAnySliceToTyped(paramsMap["sorts"].([]any), "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't convert sorts to array of strings: %s", err)
|
||||
}
|
||||
sorts := s.([]string)
|
||||
limit := int64(paramsMap["limit"].(int))
|
||||
|
||||
var tz string
|
||||
if paramsMap["tz"] != nil {
|
||||
tz = paramsMap["tz"].(string)
|
||||
} else {
|
||||
tzname, err := tzlocal.RuntimeTZ()
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, fmt.Sprintf("Error getting local timezone: %s", err))
|
||||
tzname = "Etc/UTC"
|
||||
}
|
||||
tz = tzname
|
||||
}
|
||||
|
||||
wq := v4.WriteQuery{
|
||||
Model: paramsMap["model"].(string),
|
||||
View: paramsMap["explore"].(string),
|
||||
Fields: &fields,
|
||||
Pivots: &pivots,
|
||||
Filters: &filters,
|
||||
Sorts: &sorts,
|
||||
QueryTimezone: &tz,
|
||||
return nil, fmt.Errorf("error building WriteQuery request: %w", err)
|
||||
}
|
||||
req := v4.RequestRunInlineQuery{
|
||||
Body: *wq,
|
||||
ResultFormat: "json",
|
||||
Limit: &limit,
|
||||
Body: wq,
|
||||
}
|
||||
resp, err := t.Client.RunInlineQuery(req, t.ApiSettings)
|
||||
if err != nil {
|
||||
@@ -235,5 +155,5 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
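A short sketch (assumptions noted in comments) of the slimmed-down looker-query path after this change: lookercommon.ProcessQueryArgs builds the WriteQuery (fields, filters, pivots, sorts, limit, timezone), and the inline-query request simply wraps it before being passed to the Looker client as shown in the diff.

```go
package main

import (
	"context"
	"fmt"

	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// buildInlineQueryRequest assembles the request the tool hands to
// t.Client.RunInlineQuery(req, t.ApiSettings); the standalone wrapper here
// is illustrative only.
func buildInlineQueryRequest(ctx context.Context, params tools.ParamValues) (*v4.RequestRunInlineQuery, error) {
	wq, err := lookercommon.ProcessQueryArgs(ctx, params)
	if err != nil {
		return nil, fmt.Errorf("error building WriteQuery request: %w", err)
	}
	req := v4.RequestRunInlineQuery{
		Body:         *wq,
		ResultFormat: "json",
	}
	return &req, nil
}

func main() {
	_ = buildInlineQueryRequest
}
```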