Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-11 08:28:11 -05:00)
Compare commits
38 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f693f75f38 | |
| | 327ddf0439 | |
| | 330b14e518 | |
| | 5bdab8c83b | |
| | dbf355d31a | |
| | 3bdb12f7a7 | |
| | fd149337e9 | |
| | c1305b5ab4 | |
| | 3003b45256 | |
| | 1a5fda34b1 | |
| | 3353085265 | |
| | a279d32c57 | |
| | 0568423e33 | |
| | 92845c943a | |
| | 90d4558a8e | |
| | 7791c6f87e | |
| | 8ff60ca430 | |
| | c45390e6f7 | |
| | be7db3dff2 | |
| | 7e7d55c5d1 | |
| | 30c16a559e | |
| | 14a868f2a0 | |
| | 3746dbae65 | |
| | 25a0bb7a37 | |
| | bd399bb0fb | |
| | 4c63f0c1e4 | |
| | 78e9752f62 | |
| | dfde52ca9a | |
| | a7474752d8 | |
| | be65924aa6 | |
| | 59e23e1725 | |
| | 74dbd6124d | |
| | c600c30374 | |
| | ccc3498cf0 | |
| | f55dd6fcd0 | |
| | 1526b8ab8c | |
| | 2a1b1ff787 | |
| | 86ccff0b43 | |
@@ -153,6 +153,26 @@ steps:
          "BigQuery" \
          bigquery \
          bigquery

  - id: "dataplex"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
      - "DATAPLEX_PROJECT=$PROJECT_ID"
      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
    secretEnv: ["CLIENT_ID"]
    volumes:
      - name: "go"
        path: "/gopath"
    args:
      - -c
      - |
        .ci/test_with_coverage.sh \
          "Dataplex" \
          dataplex \
          dataplex

  - id: "postgres"
    name: golang:1
@@ -467,6 +487,24 @@ steps:
          looker \
          looker

  - id: "duckdb"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
    volumes:
      - name: "go"
        path: "/gopath"
    secretEnv: ["CLIENT_ID"]
    args:
      - -c
      - |
        .ci/test_with_coverage.sh \
          "DuckDB" \
          duckdb \
          duckdb

  - id: "alloydbwaitforoperation"
@@ -578,4 +616,4 @@ substitutions:
  _DGRAPHURL: "https://play.dgraph.io"
  _COUCHBASE_BUCKET: "couchbase-bucket"
  _COUCHBASE_SCOPE: "couchbase-scope"
  _LOOKER_VERIFY_SSL: "true"
  _LOOKER_VERIFY_SSL: "true"
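The two new steps above both delegate to the shared coverage script. For local debugging, here is a rough sketch of reproducing the Dataplex step outside Cloud Build (variable names are taken from the step; the values are placeholders you must supply):

```sh
# mirror the env the Cloud Build step sets up
export GOPATH=/gopath
export DATAPLEX_PROJECT="<your-project-id>"
export SERVICE_ACCOUNT_EMAIL="<service-account-email>"
export CLIENT_ID="<oauth-client-id>"   # provided via secretEnv in the build

# same three positional arguments the build step passes
.ci/test_with_coverage.sh "Dataplex" dataplex dataplex
```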
.github/blunderbuss.yml (vendored, 4 changed lines)
@@ -1,7 +1,7 @@
assign_issues:
  - Yuan325
  - duwenxin99
  - akitsch
  - averikitsch
assign_issues_by:
  - labels:
      - 'product: bigquery'
@@ -12,4 +12,4 @@ assign_issues_by:
assign_prs:
  - Yuan325
  - duwenxin99
  - akitsch
  - averikitsch
.github/release-please.yml (vendored, 15 changed lines)
@@ -18,6 +18,7 @@ releaseType: simple
versionFile: "cmd/version.txt"
extraFiles: [
  "README.md",
  "docs/en/getting-started/colab_quickstart.ipynb",
  "docs/en/getting-started/introduction/_index.md",
  "docs/en/getting-started/local_quickstart.md",
  "docs/en/getting-started/local_quickstart_js.md",
@@ -25,13 +26,17 @@ extraFiles: [
  "docs/en/getting-started/mcp_quickstart/_index.md",
  "docs/en/samples/bigquery/local_quickstart.md",
  "docs/en/samples/bigquery/mcp_quickstart/_index.md",
  "docs/en/getting-started/colab_quickstart.ipynb",
  "docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
  "docs/en/how-to/connect-ide/bigquery_mcp.md",
  "docs/en/how-to/connect-ide/spanner_mcp.md",
  "docs/en/samples/looker/looker_gemini.md",
  "docs/en/samples/looker/looker_mcp_inspector.md",
  "docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
  "docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
  "docs/en/how-to/connect-ide/bigquery_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
  "docs/en/how-to/connect-ide/postgres_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
  "docs/en/how-to/connect-ide/firestore_mcp.md",
  "docs/en/how-to/connect-ide/looker_mcp.md",
  "docs/en/how-to/connect-ide/postgres_mcp.md",
  "docs/en/how-to/connect-ide/spanner_mcp.md",
]
.gitignore (vendored, 5 changed lines)
@@ -4,6 +4,9 @@
# vscode
.vscode/

# idea
.idea/

# npm
node_modules

@@ -17,4 +20,4 @@ node_modules

# executable
genai-toolbox
toolbox
toolbox
CHANGELOG.md (38 changed lines)
@@ -1,5 +1,43 @@
# Changelog

## [0.10.0](https://github.com/googleapis/genai-toolbox/compare/v0.9.0...v0.10.0) (2025-07-25)


### Features

* Add `Map` parameters support ([#928](https://github.com/googleapis/genai-toolbox/issues/928)) ([4468bc9](https://github.com/googleapis/genai-toolbox/commit/4468bc920bbf27dce4ab160197587b7c12fcd20f))
* Add Dataplex source and tool ([#847](https://github.com/googleapis/genai-toolbox/issues/847)) ([30c16a5](https://github.com/googleapis/genai-toolbox/commit/30c16a559e8d49a9a717935269e69b97ec25519a))
* Add Looker source and tool ([#923](https://github.com/googleapis/genai-toolbox/issues/923)) ([c67e01b](https://github.com/googleapis/genai-toolbox/commit/c67e01bcf998e7b884be30ebb1fd277c89ed6ffc))
* Add support for null optional parameter ([#802](https://github.com/googleapis/genai-toolbox/issues/802)) ([a817b12](https://github.com/googleapis/genai-toolbox/commit/a817b120ca5e09ce80eb8d7544ebbe81fc28b082)), closes [#736](https://github.com/googleapis/genai-toolbox/issues/736)
* **prebuilt/alloydb-admin-config:** Add alloydb control plane as a prebuilt config ([#937](https://github.com/googleapis/genai-toolbox/issues/937)) ([0b28b72](https://github.com/googleapis/genai-toolbox/commit/0b28b72aa0ca2cdc87afbddbeb7f4dbb9688593d))
* **prebuilt/mysql,prebuilt/mssql:** Add generic mysql and mssql prebuilt tools ([#983](https://github.com/googleapis/genai-toolbox/issues/983)) ([c600c30](https://github.com/googleapis/genai-toolbox/commit/c600c30374443b6106c1f10b60cd334fd202789b))
* **server/mcp:** Support MCP version 2025-06-18 ([#898](https://github.com/googleapis/genai-toolbox/issues/898)) ([313d3ca](https://github.com/googleapis/genai-toolbox/commit/313d3ca0d084a3a6e7ac9a21a862aa31bf3edadd))
* **sources/mssql:** Add support for encrypt connection parameter ([#874](https://github.com/googleapis/genai-toolbox/issues/874)) ([14a868f](https://github.com/googleapis/genai-toolbox/commit/14a868f2a0780b94c2ca104419b2ff098778303b))
* **sources/firestore:** Add Firestore as Source ([#786](https://github.com/googleapis/genai-toolbox/issues/786)) ([2bb790e](https://github.com/googleapis/genai-toolbox/commit/2bb790e4f8194b677fe0ba40122d409d0e3e687e))
* **sources/mongodb:** Add MongoDB Source ([#969](https://github.com/googleapis/genai-toolbox/issues/969)) ([74dbd61](https://github.com/googleapis/genai-toolbox/commit/74dbd6124daab6192dd880dbd1d15f36861abf74))
* **tools/alloydb-wait-for-operation:** Add wait for operation tool with exponential backoff ([#920](https://github.com/googleapis/genai-toolbox/issues/920)) ([3f6ec29](https://github.com/googleapis/genai-toolbox/commit/3f6ec2944ede18ee02b10157cc048145bdaec87a))
* **tools/mongodb-aggregate:** Add MongoDB `aggregate` Tools ([#977](https://github.com/googleapis/genai-toolbox/issues/977)) ([bd399bb](https://github.com/googleapis/genai-toolbox/commit/bd399bb0fb7134469345ed9a1111ea4209440867))
* **tools/mongodb-delete:** Add MongoDB `delete` Tools ([#974](https://github.com/googleapis/genai-toolbox/issues/974)) ([78e9752](https://github.com/googleapis/genai-toolbox/commit/78e9752f620e065246f3e7b9d37062e492247c8a))
* **tools/mongodb-find:** Add MongoDB `find` Tools ([#970](https://github.com/googleapis/genai-toolbox/issues/970)) ([a747475](https://github.com/googleapis/genai-toolbox/commit/a7474752d8d7ea7af1e80a3c4533d2fd4154d897))
* **tools/mongodb-insert:** Add MongoDB `insert` Tools ([#975](https://github.com/googleapis/genai-toolbox/issues/975)) ([4c63f0c](https://github.com/googleapis/genai-toolbox/commit/4c63f0c1e402817a0c8fec611635e99290308d0e))
* **tools/mongodb-update:** Add MongoDB `update` Tools ([#972](https://github.com/googleapis/genai-toolbox/issues/972)) ([dfde52c](https://github.com/googleapis/genai-toolbox/commit/dfde52ca9a8e25e2f3944f52b4c2e307072b6c37))
* **tools/neo4j-execute-cypher:** Add neo4j-execute-cypher for Neo4j sources ([#946](https://github.com/googleapis/genai-toolbox/issues/946)) ([81d0505](https://github.com/googleapis/genai-toolbox/commit/81d05053b2e08338fd6eabe4849c309064f76b6b))
* **tools/neo4j-schema:** Add neo4j-schema tool ([#978](https://github.com/googleapis/genai-toolbox/issues/978)) ([be7db3d](https://github.com/googleapis/genai-toolbox/commit/be7db3dff263625ce64fdb726e81164996b7a708))
* **tools/wait:** Create wait for tool ([#885](https://github.com/googleapis/genai-toolbox/issues/885)) ([ed5ef4c](https://github.com/googleapis/genai-toolbox/commit/ed5ef4caea10ba1dbc49c0fc0a0d2b91cf341d3b))


### Bug Fixes

* Fix document preview pipeline for forked PRs ([#950](https://github.com/googleapis/genai-toolbox/issues/950)) ([481cc60](https://github.com/googleapis/genai-toolbox/commit/481cc608bae807d9e92497bc8863066916f7ef21))
* **prebuilt/firestore:** Mark database field as required in the firestore prebuilt tools ([#959](https://github.com/googleapis/genai-toolbox/issues/959)) ([15417d4](https://github.com/googleapis/genai-toolbox/commit/15417d4e0c7b173e81edbbeb672e53884d186104))
* **prebuilt/cloud-sql-mssql:** Correct source reference for execute_sql tool in cloud-sql-mssql.yaml prebuilt config ([#938](https://github.com/googleapis/genai-toolbox/issues/938)) ([d16728e](https://github.com/googleapis/genai-toolbox/commit/d16728e5c603eab37700876a6ddacbf709fd5823))
* **prebuilt/cloud-sql-mysql:** Update list_table tool ([#924](https://github.com/googleapis/genai-toolbox/issues/924)) ([2083ba5](https://github.com/googleapis/genai-toolbox/commit/2083ba50483951e9ee6101bb832aa68823cd96a5))
* Replace 'float' with 'number' in McpManifest ([#985](https://github.com/googleapis/genai-toolbox/issues/985)) ([59e23e1](https://github.com/googleapis/genai-toolbox/commit/59e23e17250a516e3931996114f32ac6526a4f8e))
* **server/api:** Add logger to context in tool invoke handler ([#891](https://github.com/googleapis/genai-toolbox/issues/891)) ([8ce311f](https://github.com/googleapis/genai-toolbox/commit/8ce311f256481e8f11ecb4aa505b95a562f394ef))
* **sources/looker:** Add agent tag to Looker API calls. ([#966](https://github.com/googleapis/genai-toolbox/issues/966)) ([f55dd6f](https://github.com/googleapis/genai-toolbox/commit/f55dd6fcd099f23bd89df62b268c4a53d16f3bac))
* **tools/bigquery-execute-sql:** Ensure invoke always returns a non-null value ([#925](https://github.com/googleapis/genai-toolbox/issues/925)) ([9a55b80](https://github.com/googleapis/genai-toolbox/commit/9a55b804821a6ccfcd157bcfaee7e599c4a5cb63))
* **tools/mysqlsql:** Unmarshal json data from database during invoke ([#979](https://github.com/googleapis/genai-toolbox/issues/979)) ([ccc3498](https://github.com/googleapis/genai-toolbox/commit/ccc3498cf0a4c43eb909e3850b9e6f582cd48f2a)), closes [#840](https://github.com/googleapis/genai-toolbox/issues/840)

## [0.9.0](https://github.com/googleapis/genai-toolbox/compare/v0.8.0...v0.9.0) (2025-07-11)
@@ -81,7 +81,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
#### 2. Implement the New Tool

We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgressql).
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

* **Create a new directory** under `internal/tools` for your tool type (e.g.,
  `internal/tools/newdb` or `internal/tools/newdb<tool_name>`).
@@ -134,7 +134,7 @@ tools.
5. (Optional) [RunToolInvokeWithTemplateParameters][temp-param]: tests for [template
   parameters][temp-param-doc]. Only run this test if template
   parameters apply to your tool.


* **Add the new database to the test config** in
  [integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
README.md (35 changed lines)
@@ -114,7 +114,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.9.0
export VERSION=0.10.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -127,12 +127,23 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.9.0
export VERSION=0.10.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

</details>

<details>
<summary>Homebrew</summary>

To install Toolbox using Homebrew on macOS or Linux:

```sh
brew install mcp-toolbox
```

</details>

<details>
<summary>Compile from source</summary>

@@ -140,7 +151,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.9.0
go install github.com/googleapis/genai-toolbox@v0.10.0
```
<!-- {x-release-please-end} -->

@@ -154,8 +165,18 @@ execute `toolbox` to start the server:
```sh
./toolbox --tools-file "tools.yaml"
```

> [!NOTE]
> Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
> Toolbox enables dynamic reloading by default. To disable, use the
> `--disable-reload` flag.

#### Homebrew Users

If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:

```sh
toolbox --tools-file "tools.yaml"
```

You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).
@@ -509,9 +530,9 @@ For more detailed instructions on using the Toolbox Core SDK, see the
// Convert the tool using the tbgenkit package
// Use this tool with Genkit Go
genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
if err != nil {
    log.Fatalf("Failed to convert tool: %v\n", err)
}
if err != nil {
    log.Fatalf("Failed to convert tool: %v\n", err)
}
}
```
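The reworded note above is the only behavioral caveat in this section; a minimal sketch of opting out of dynamic reloading (flag name as documented above):

```sh
# start the server without watching tools.yaml for changes
./toolbox --tools-file "tools.yaml" --disable-reload
```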
cmd/BUILD (new file, 20 lines)
@@ -0,0 +1,20 @@
load("//tools/build_defs/go:go_library.bzl", "go_library")
load("//tools/build_defs/go:go_test.bzl", "go_test")

go_library(
    name = "cmd",
    srcs = [
        "options.go",
        "root.go",
    ],
    embedsrcs = ["version.txt"],
)

go_test(
    name = "cmd_test",
    srcs = [
        "options_test.go",
        "root_test.go",
    ],
    library = ":cmd",
)
cmd/root.go (18 changed lines)
@@ -51,7 +51,9 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
  _ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
  _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
  _ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
  _ "github.com/googleapis/genai-toolbox/internal/tools/duckdbsql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
@@ -68,13 +70,24 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
  _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
  _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
  _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
  _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
  _ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
  _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/redis"
@@ -94,10 +107,13 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
  _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
  _ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
  _ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
  _ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
  _ "github.com/googleapis/genai-toolbox/internal/sources/duckdb"
  _ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
  _ "github.com/googleapis/genai-toolbox/internal/sources/http"
  _ "github.com/googleapis/genai-toolbox/internal/sources/looker"
  _ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
  _ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
  _ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
  _ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
@@ -203,7 +219,7 @@ func NewCommand(opts ...Option) *Command {
  flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
  flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
  flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
  flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres-admin', alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'firestore', 'postgres', 'spanner', 'spanner-postgres'.")
  flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres-admin', alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'dataplex', 'firestore', 'looker', 'mssql', 'mysql', 'postgres', 'spanner', 'spanner-postgres'.")
  flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
  flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
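With the expanded `--prebuilt` list, the new sources can be served without writing a tools file; a minimal sketch using two of the newly allowed values (flag names come from the definitions above; each prebuilt config still expects its own credentials and environment variables):

```sh
# serve the prebuilt Dataplex tools over MCP stdio
./toolbox --prebuilt dataplex --stdio

# serve the prebuilt Looker tools as a regular HTTP server
./toolbox --prebuilt looker
```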
@@ -1167,7 +1167,10 @@ func TestPrebuiltTools(t *testing.T) {
  cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
  cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
  cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
  dataplex_config, _ := prebuiltconfigs.Get("dataplex")
  firestoreconfig, _ := prebuiltconfigs.Get("firestore")
  mysql_config, _ := prebuiltconfigs.Get("mysql")
  mssql_config, _ := prebuiltconfigs.Get("mssql")
  looker_config, _ := prebuiltconfigs.Get("looker")
  postgresconfig, _ := prebuiltconfigs.Get("postgres")
  spanner_config, _ := prebuiltconfigs.Get("spanner")
@@ -1241,6 +1244,16 @@ func TestPrebuiltTools(t *testing.T) {
        },
      },
    },
    {
      name: "dataplex prebuilt tools",
      in:   dataplex_config,
      wantToolset: server.ToolsetConfigs{
        "dataplex-tools": tools.ToolsetConfig{
          Name:      "dataplex-tools",
          ToolNames: []string{"dataplex_search_entries"},
        },
      },
    },
    {
      name: "firestore prebuilt tools",
      in:   firestoreconfig,
@@ -1251,13 +1264,33 @@ func TestPrebuiltTools(t *testing.T) {
        },
      },
    },
    {
      name: "mysql prebuilt tools",
      in:   mysql_config,
      wantToolset: server.ToolsetConfigs{
        "mysql-database-tools": tools.ToolsetConfig{
          Name:      "mysql-database-tools",
          ToolNames: []string{"execute_sql", "list_tables"},
        },
      },
    },
    {
      name: "mssql prebuilt tools",
      in:   mssql_config,
      wantToolset: server.ToolsetConfigs{
        "mssql-database-tools": tools.ToolsetConfig{
          Name:      "mssql-database-tools",
          ToolNames: []string{"execute_sql", "list_tables"},
        },
      },
    },
    {
      name: "looker prebuilt tools",
      in:   looker_config,
      wantToolset: server.ToolsetConfigs{
        "looker-tools": tools.ToolsetConfig{
          Name:      "looker-tools",
          ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "get_looks", "run_look"},
          ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look"},
        },
      },
    },
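To exercise the new table entries locally, the prebuilt-tools test can be run on its own; a sketch, assuming a standard Go toolchain and a checkout of the module:

```sh
# run only the prebuilt-config test in the cmd package
go test ./cmd -run TestPrebuiltTools -v
```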
@@ -1 +1 @@
0.9.0
0.10.0
@@ -4,12 +4,12 @@ type: docs
notoc: false
weight: 1
description: >
  All of Toolbox's documentation.
  All of Toolbox's documentation.
---

<html>
<head>
<link rel="canonical" href="getting-started/introduction/"/>
<meta http-equiv="refresh" content="0;url=getting-started/introduction"/>
<meta http-equiv="refresh" content="0;url=getting-started/introduction/"/>
</head>
</html>
@@ -3,7 +3,7 @@ title: "Telemetry"
type: docs
weight: 2
description: >
  An overview of telemetry and observability in Toolbox.
  An overview of telemetry and observability in Toolbox.
---

## About
@@ -158,7 +158,7 @@ enabled:

- [Cloud Logging API](https://cloud.google.com/logging/docs/api/enable-api)
- [Cloud Monitoring API](https://cloud.google.com/monitoring/api/enable-api)
- [Cloud Trace API](https://cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
- [Cloud Trace API](https://console.cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
{{< /notice >}}

#### OTLP Exporter
@@ -177,7 +177,7 @@ It receives telemetry data, transforms it, and then exports data to backends
that can store it permanently. Toolbox provide an option to export telemetry
data to user's choice of backend(s) that are compatible with the Open Telemetry
Protocol (OTLP). If you would like to use a collector, please refer to this
[Export Telemetry using the Otel Collector](../how-to/export_telemetry.md).
[Export Telemetry using the Otel Collector](../../how-to/export_telemetry.md).

### Flags
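The links fixed above sit alongside the exporter flags defined in cmd/root.go earlier in this diff; a minimal sketch of enabling either exporter (the OTLP endpoint is an example value):

```sh
# export telemetry directly to Google Cloud Monitoring/Trace
./toolbox --tools-file "tools.yaml" --telemetry-gcp

# or export over OTLP to a local collector
./toolbox --tools-file "tools.yaml" \
  --telemetry-otlp "http://127.0.0.1:4318" \
  --telemetry-service-name "toolbox"
```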
@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.9.0\" # x-release-please-version\n",
"version = \"0.10.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",
@@ -86,7 +86,7 @@ To install Toolbox as a binary:

```sh
# see releases page for other versions
export VERSION=0.9.0
export VERSION=0.10.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,10 +97,17 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.9.0
export VERSION=0.10.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

{{% /tab %}}
{{% tab header="Homebrew" lang="en" %}}
To install Toolbox using Homebrew on macOS or Linux:

```sh
brew install mcp-toolbox
```
{{% /tab %}}
{{% tab header="Compile from source" lang="en" %}}

@@ -108,7 +115,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.9.0
go install github.com/googleapis/genai-toolbox@v0.10.0
```

{{% /tab %}}
@@ -123,15 +130,25 @@ execute `toolbox` to start the server:
```sh
./toolbox --tools-file "tools.yaml"
```

{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}

#### Homebrew Users

If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:

```sh
toolbox --tools-file "tools.yaml"
```

You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).

For more detailed documentation on deploying to different environments, check
out the resources in the [How-to section](../../how-to/_index.md)
out the resources in the [How-to section](../../how-to/)

### Integrating your application
|
||||
application. See below the list of Client SDKs for using various frameworks:
|
||||
|
||||
#### Python
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
@@ -151,7 +169,7 @@ from toolbox_core import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = await client.load_toolset("toolset_name")
|
||||
@@ -172,7 +190,7 @@ from toolbox_langchain import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application!
|
||||
tools = client.load_toolset()
|
||||
@@ -193,7 +211,7 @@ from toolbox_llamaindex import ToolboxClient
|
||||
|
||||
# update the url to point to your server
|
||||
|
||||
async with ToolboxClient("<http://127.0.0.1:5000>") as client:
|
||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||
|
||||
# these tools can be passed to your application
|
||||
|
||||
@@ -303,7 +321,7 @@ const toolboxTools = await client.loadToolset('toolsetName');
|
||||
const getTool = (toolboxTool) => tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParams(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
execute: toolboxTool
|
||||
});;
|
||||
|
||||
@@ -565,4 +583,6 @@ func main() {
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
|
||||
|
||||
For end-to-end samples on using the Toolbox Go SDK with orchestration frameworks, see the [project's samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
|
||||
For end-to-end samples on using the Toolbox Go SDK with orchestration
|
||||
frameworks, see the [project's
|
||||
samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
|
||||
|
||||
@@ -19,7 +19,9 @@ This guide assumes you have already done the following:

### Cloud Setup (Optional)

If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using `vertexai=True` or a Google GenAI model), follow these one-time setup steps for local development:
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using
`vertexai=True` or a Google GenAI model), follow these one-time setup steps for
local development:

1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
@@ -154,7 +156,6 @@ postgres` and a password next time.
\q
```

## Step 2: Install and configure Toolbox

In this section, we will download Toolbox, configure our tools in a
@@ -170,7 +171,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -271,8 +272,10 @@ In this section, we will download Toolbox, configure our tools in a
```bash
./toolbox --tools-file "tools.yaml"
```

{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}

## Step 3: Connect your agent to Toolbox
@@ -15,7 +15,8 @@ This guide assumes you have already done the following:

### Cloud Setup (Optional)

If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using Gemini or PaLM models), follow these one-time setup steps:
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:

1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
@@ -29,8 +30,8 @@ If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using G
[Go (v1.24.2 or higher)]: https://go.dev/doc/install
[install-postgres]: https://www.postgresql.org/download/
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]: https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment

[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment

## Step 1: Set up your database

@@ -166,7 +167,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -267,8 +268,10 @@ In this section, we will download Toolbox, configure our tools in a
```bash
./toolbox --tools-file "tools.yaml"
```

{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}

## Step 3: Connect your agent to Toolbox
@@ -282,13 +285,15 @@ from Toolbox.
go mod init main
```

1. In a new terminal, install the [SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go).
1. In a new terminal, install the
   [SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go).

```bash
go get github.com/googleapis/mcp-toolbox-sdk-go
```

1. Create a new file named `hotelagent.go` and copy the following code to create an agent:
1. Create a new file named `hotelagent.go` and copy the following code to create
   an agent:

{{< tabpane persist=header >}}
{{< tab header="LangChain Go" lang="go" >}}
@@ -917,5 +922,6 @@ func main() {
```

{{< notice info >}}
For more information, visit the [Go SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-go).
{{</ notice >}}
For more information, visit the [Go SDK
repo](https://github.com/googleapis/mcp-toolbox-sdk-go).
{{</ notice >}}
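Putting the re-wrapped steps above together, a rough sketch of the shell session a reader ends up with (`hotelagent.go` is the file name from the guide; `go mod tidy` is an assumption about resolving the remaining imports):

```sh
go mod init main
go get github.com/googleapis/mcp-toolbox-sdk-go
# paste the agent code into hotelagent.go, then:
go mod tidy
go run hotelagent.go
```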
@@ -15,7 +15,8 @@ This guide assumes you have already done the following:

### Cloud Setup (Optional)

If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using Gemini or PaLM models), follow these one-time setup steps:
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using
Gemini or PaLM models), follow these one-time setup steps:

1. [Install the Google Cloud CLI]
1. [Set up Application Default Credentials (ADC)]
@@ -29,8 +30,8 @@ If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using G
[Node.js (v18 or higher)]: https://nodejs.org/
[install-postgres]: https://www.postgresql.org/download/
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
[Set up Application Default Credentials (ADC)]: https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment

[Set up Application Default Credentials (ADC)]:
https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment

## Step 1: Set up your database

@@ -166,7 +167,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -267,6 +268,7 @@ In this section, we will download Toolbox, configure our tools in a
```bash
./toolbox --tools-file "tools.yaml"
```

{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}
@@ -338,7 +340,6 @@ async function runApplication() {
    model: "gemini-2.0-flash",
  });


  const client = new ToolboxClient("http://127.0.0.1:5000");
  const toolboxTools = await client.loadToolset("my-toolset");

@@ -363,7 +364,6 @@ async function runApplication() {
    },
  };


  for (const query of queries) {
    const agentOutput = await agent.invoke(
      {
@@ -575,4 +575,4 @@ main();

{{< notice info >}}
For more information, visit the [JS SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
{{</ notice >}}
{{</ notice >}}
@@ -71,7 +71,7 @@ access by our agent, and create a database user for Toolbox to connect with.

```sql
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
VALUES
  (1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
  (2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
  (3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -200,7 +200,7 @@ In this section, we will download Toolbox, configure our tools in a
```

For more info on tools, check out the
[Tools](../../resources/tools/_index.md) section.
[Tools](../../resources/tools/) section.

1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:

@@ -218,7 +218,8 @@ In this section, we will download Toolbox, configure our tools in a

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
1. It should show the following when the MCP Inspector is up and running (please
   take note of `<YOUR_SESSION_TOKEN>`):

```bash
Starting MCP inspector...
@@ -236,7 +237,8 @@ In this section, we will download Toolbox, configure our tools in a

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
1. For `Configuration` -> `Proxy Session Token`, make sure
   `<YOUR_SESSION_TOKEN>` is present.

1. Click Connect.

@@ -246,4 +248,4 @@ In this section, we will download Toolbox, configure our tools in a



1. Test out your tools here!
1. Test out your tools here!
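The inspector steps in this quickstart assume the MCP Inspector is launched via npx; a sketch (the exact package invocation is an assumption and is not shown in this diff):

```sh
# launch the MCP Inspector; answer `y` when it asks to install the package
npx @modelcontextprotocol/inspector
# then connect it to Toolbox at http://127.0.0.1:5000/mcp as described above
```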
docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md (new file, 342 lines)
@@ -0,0 +1,342 @@
---
title: "AlloyDB Admin API using MCP"
type: docs
weight: 2
description: >
  Create your AlloyDB database with MCP Toolbox.
---

This guide covers how to use [MCP Toolbox for Databases][toolbox] to create AlloyDB clusters and instances from IDE enabling their E2E journey.

- [Cursor][cursor]
- [Windsurf][windsurf] (Codium)
- [Visual Studio Code][vscode] (Copilot)
- [Cline][cline] (VS Code extension)
- [Claude desktop][claudedesktop]
- [Claude code][claudecode]
- [Gemini CLI][geminicli]
- [Gemini Code Assist][geminicodeassist]

[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client

## Before you begin

1. In the Google Cloud console, on the [project selector page](https://console.cloud.google.com/projectselector2/home/dashboard), select or create a Google Cloud project.

1. [Make sure that billing is enabled for your Google Cloud project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).

## Install MCP Toolbox

1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.10.0+:

   <!-- {x-release-please-start-version} -->
   {{< tabpane persist=header >}}
   {{< tab header="linux/amd64" lang="bash" >}}
   curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
   {{< /tab >}}

   {{< tab header="darwin/arm64" lang="bash" >}}
   curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
   {{< /tab >}}

   {{< tab header="darwin/amd64" lang="bash" >}}
   curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
   {{< /tab >}}

   {{< tab header="windows/amd64" lang="bash" >}}
   curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
   {{< /tab >}}
   {{< /tabpane >}}
   <!-- {x-release-please-end} -->

1. Make the binary executable:

   ```bash
   chmod +x toolbox
   ```

1. Verify the installation:

   ```bash
   ./toolbox --version
   ```

## Configure your MCP Client

{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}

1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

1. Restart Claude code to apply the new configuration.
{{% /tab %}}

{{% tab header="Claude desktop" lang="en" %}}

1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
{{% /tab %}}

{{% tab header="Cline" lang="en" %}}

1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

1. You should see a green active status after the server is successfully connected.
{{% /tab %}}

{{% tab header="Cursor" lang="en" %}}

1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

1. [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
{{% /tab %}}

{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}

1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

{{% /tab %}}

{{% tab header="Windsurf" lang="en" %}}

1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}

1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}

1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Generate Access token to be used as API_KEY using `gcloud auth print-access-token`.

   > **Note:** The lifetime of token is 1 hour.

1. Add the following configuration, replace the environment variables with your values, and save:

   ```json
   {
     "mcpServers": {
       "alloydb-admin": {
         "command": "./PATH/TO/toolbox",
         "args": ["--prebuilt", "alloydb-postgres-admin", "--stdio"],
         "env": {
           "API_KEY": "your-api-key"
         }
       }
     }
   }
   ```

{{% /tab %}}
{{< /tabpane >}}
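Every client configuration above expects an `API_KEY` produced by `gcloud auth print-access-token`; a small sketch of wiring that up before starting the client (the token expires after roughly an hour, as the notes above warn):

```sh
# generate a short-lived access token for the prebuilt AlloyDB admin config
export API_KEY="$(gcloud auth print-access-token)"

# optional sanity check that the prebuilt config starts with it
API_KEY="$API_KEY" ./PATH/TO/toolbox --prebuilt alloydb-postgres-admin --stdio
```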

## Use Tools

Your AI tool is now connected to AlloyDB using MCP. Try asking your AI assistant to create a database, cluster or instance.

The following tools are available to the LLM:

1. **alloydb-create-cluster**: creates alloydb cluster
1. **alloydb-create-instance**: creates alloydb instance (PRIMARY, READ_POOL or SECONDARY)
1. **alloydb-get-operation**: polls on operations API until the operation is done.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

## Connect to your Data

After setting up an AlloyDB cluster and instance, you can [connect your IDE to the database](https://cloud.google.com/alloydb/docs/pre-built-tools-with-mcp-toolbox).
docs/en/how-to/connect-ide/firestore_mcp.md (new file, 333 lines)
@@ -0,0 +1,333 @@
---
title: "Firestore using MCP"
type: docs
weight: 2
description: >
  Connect your IDE to Firestore using Toolbox.
---

[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Firestore. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Firestore instance:

* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]

[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client

## Set up Firestore

1. Create or select a Google Cloud project.

   * [Create a new project](https://cloud.google.com/resource-manager/docs/creating-managing-projects)
   * [Select an existing project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects)

1. [Enable the Firestore API](https://console.cloud.google.com/apis/library/firestore.googleapis.com) for your project.

1. [Create a Firestore database](https://cloud.google.com/firestore/docs/create-database-web-mobile-client-library) if you haven't already.

1. Set up authentication for your local environment.

   * [Install gcloud CLI](https://cloud.google.com/sdk/docs/install)
   * Run `gcloud auth application-default login` to authenticate

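For readers who prefer the CLI, the setup list above maps onto a few gcloud commands; a sketch, assuming the gcloud CLI is installed (the project ID is a placeholder):

```sh
gcloud config set project your-project-id
# enable the Firestore API referenced above
gcloud services enable firestore.googleapis.com
# authenticate the local environment with Application Default Credentials
gcloud auth application-default login
```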
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
V0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini
|
||||
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code
|
||||
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
|
||||
extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create
|
||||
a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"firestore": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","firestore","--stdio"],
|
||||
"env": {
|
||||
"FIRESTORE_PROJECT": "your-project-id",
|
||||
"FIRESTORE_DATABASE": "(default)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Firestore using MCP. Try asking your AI
|
||||
assistant to list collections, get documents, query collections, or manage
|
||||
security rules.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **firestore-get-documents**: Gets multiple documents from Firestore by their
|
||||
paths
|
||||
1. **firestore-list-collections**: List Firestore collections for a given parent
|
||||
path
|
||||
1. **firestore-delete-documents**: Delete multiple documents from Firestore
|
||||
1. **firestore-query-collection**: Query documents from a collection with
|
||||
filtering, ordering, and limit options
|
||||
1. **firestore-get-rules**: Retrieves the active Firestore security rules for
|
||||
the current project
|
||||
1. **firestore-validate-rules**: Validates Firestore security rules syntax and
|
||||
errors
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -44,21 +44,21 @@ to expose your developer assistant tools to a Looker instance:
|
||||
v0.10.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -90,12 +90,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -121,12 +117,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -155,12 +147,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -187,12 +175,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -221,12 +205,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -252,12 +232,8 @@ curl -O <https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/tool
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "/PATH/TO/toolbox",
|
||||
"args": [
|
||||
"--stdio",
|
||||
"--prebuilt",
|
||||
"looker"
|
||||
],
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "",
|
||||
@@ -289,6 +265,7 @@ The following tools are available to the LLM:
|
||||
1. **get_parameters**: list the parameters in a given explore
|
||||
1. **query**: Run a query
|
||||
1. **query_sql**: Return the SQL generated by Looker for a query
|
||||
1. **query_url**: Return a link to the query in Looker for further exploration
|
||||
1. **get_looks**: Return the saved Looks that match a title or description
|
||||
1. **run_look**: Run a saved Look and return the data
|
||||
|
||||
|
||||
@@ -52,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/linux/amd64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/arm64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/amd64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/windows/amd64/toolbox>
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -28,8 +28,9 @@ Toolbox currently supports the following versions of MCP specification:
|
||||
|
||||
The auth implementation in Toolbox is not supported in MCP's auth specification.
|
||||
This includes:
|
||||
* [Authenticated Parameters](../resources/tools/_index.md#authenticated-parameters)
|
||||
* [Authorized Invocations](../resources/tools/_index.md#authorized-invocations)
|
||||
|
||||
* [Authenticated Parameters](../resources/tools/#authenticated-parameters)
|
||||
* [Authorized Invocations](../resources/tools/#authorized-invocations)
|
||||
|
||||
## Connecting to Toolbox with an MCP client
|
||||
|
||||
@@ -39,7 +40,7 @@ This includes:
|
||||
MCP is only compatible with Toolbox version 0.3.0 and above.
|
||||
{{< /notice >}}
|
||||
|
||||
1. [Install](../getting-started/introduction/_index.md#installing-the-server)
|
||||
1. [Install](../getting-started/introduction/#installing-the-server)
|
||||
Toolbox version 0.3.0+.
|
||||
|
||||
1. Make sure you've set up and initialized your database.
|
||||
@@ -62,7 +63,8 @@ remote HTTP server. Logs will be set to the `warn` level by default. `debug` and
|
||||
`info` logs are not supported with stdio.
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
Toolbox enables dynamic reloading by default. To disable, use the
|
||||
`--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
### Connecting via HTTP
|
||||
@@ -131,7 +133,7 @@ testing and debugging Toolbox server.
|
||||
You should be able to inspect your toolbox tools!
|
||||
{{% /tab %}}
|
||||
{{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
|
||||
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
|
||||
1. [Run Toolbox](../getting-started/introduction/#running-the-server).
|
||||
|
||||
1. In a separate terminal, run Inspector directly through `npx`:
|
||||
|
||||
@@ -148,7 +150,7 @@ testing and debugging Toolbox server.
|
||||
tools!
|
||||
{{% /tab %}}
|
||||
{{% tab header="Streamable HTTP" lang="en" %}}
|
||||
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
|
||||
1. [Run Toolbox](../getting-started/introduction/#running-the-server).
|
||||
|
||||
1. In a separate terminal, run Inspector directly through `npx`:
|
||||
|
||||
|
||||
@@ -79,7 +79,7 @@ database are in the same VPC network.
|
||||
|
||||
Create a `tools.yaml` file that contains your configuration for Toolbox. For
|
||||
details, see the
|
||||
[configuration](https://googleapis.github.io/genai-toolbox/resources/sources/)
|
||||
[configuration](../resources/sources/)
|
||||
section.
|
||||
|
||||
## Deploy to Cloud Run
|
||||
@@ -125,7 +125,7 @@ section.
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080" \
|
||||
# TODO(dev): update the following to match your VPC if necessary
|
||||
# TODO(dev): update the following to match your VPC if necessary
|
||||
--network default \
|
||||
--subnet default
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
|
||||
@@ -3,11 +3,11 @@ title: "AuthServices"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
AuthServices represent services that handle authentication and authorization.
|
||||
AuthServices represent services that handle authentication and authorization.
|
||||
---
|
||||
|
||||
AuthServices represent services that handle authentication and authorization. It
|
||||
can primarily be used by [Tools](../tools) in two different ways:
|
||||
can primarily be used by [Tools](../tools/) in two different ways:
|
||||
|
||||
- [**Authorized Invocation**][auth-invoke] is when a tool
|
||||
is validated by the auth service before the call can be invoked. Toolbox
|
||||
|
||||
@@ -3,7 +3,7 @@ title: "Google Sign-In"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Use Google Sign-In for Oauth 2.0 flow and token lifecycle.
|
||||
Use Google Sign-In for Oauth 2.0 flow and token lifecycle.
|
||||
---
|
||||
|
||||
## Getting Started
|
||||
|
||||
93
docs/en/resources/sources/dataplex.md
Normal file
@@ -0,0 +1,93 @@
|
||||
---
|
||||
title: "Dataplex"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
|
||||
---
|
||||
|
||||
# Dataplex Source
|
||||
|
||||
[Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
|
||||
solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
|
||||
powers AI, analytics, and business intelligence at scale.
|
||||
|
||||
At the heart of these governance capabilities is a catalog that contains a
|
||||
centralized inventory of the data assets in your organization. Dataplex
|
||||
Universal Catalog holds business, technical, and runtime metadata for all of
|
||||
your data. It helps you discover relationships and semantics in the metadata by
|
||||
applying artificial intelligence and machine learning.
|
||||
|
||||
[dataplex-docs]: https://cloud.google.com/dataplex/docs
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-dataplex-source:
|
||||
kind: "dataplex"
|
||||
project: "my-project-id"
|
||||
```
|
||||
|
||||
## Sample System Prompt
|
||||
|
||||
You can use the following system prompt as "Custom Instructions" in your client
|
||||
application.
|
||||
|
||||
```
|
||||
Whenever you receive a response from the dataplex_search_entries tool, decide what to do by following these steps:
|
||||
1. If there are multiple search results found
|
||||
1.1. Present the list of search results
|
||||
1.2. Format the output in nested ordered list, for example:
|
||||
Given
|
||||
```
|
||||
{
|
||||
results: [
|
||||
{
|
||||
name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
|
||||
entrySource: {
|
||||
displayName: "Users"
|
||||
description: "Table contains list of users."
|
||||
location: "aws-us-east-1"
|
||||
system: "BigQuery"
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
|
||||
entrySource: {
|
||||
displayName: "Top customers",
|
||||
description: "Table contains list of best customers."
|
||||
location: "us-central1"
|
||||
system: "BigQuery"
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
Return output formatted as markdown nested list:
|
||||
```
|
||||
* Users:
|
||||
- projectId: test_project
|
||||
- location: aws-us-east-1
|
||||
- description: Table contains list of users.
|
||||
* Top customers:
|
||||
- projectId: another_project
|
||||
- location: us-central1
|
||||
- description: Table contains list of best customers.
|
||||
```
|
||||
1.3. Ask the user to select one of the presented search results
|
||||
2. If there is only one search result found
|
||||
2.1. Present the search result immediately.
|
||||
3. If there are no search results found
|
||||
3.1. Explain that no search result was found
|
||||
3.2. Suggest providing a more specific search query.
|
||||
|
||||
Do not try to search within search results on your own.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "dataplex". |
|
||||
| project | string | true | Id of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
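As a hedged sketch of how this source might be paired with the `dataplex-search-entries` tool documented later in this change (the source and tool names below are placeholders, and the field layout follows the examples above):

```yaml
sources:
  my-dataplex-source:
    kind: "dataplex"
    project: "my-project-id"

tools:
  search_catalog_entries:
    kind: dataplex-search-entries
    source: my-dataplex-source
    description: Use this tool to search Dataplex Catalog entries matching a query.
```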
|
||||
73
docs/en/resources/sources/duckdb.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
title: DuckDB
|
||||
linkTitle: DuckDB
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
DuckDB is an in-process SQL OLAP database management system designed for analytical query processing.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[DuckDB](https://duckdb.org/) is an embedded analytical database management system that runs in-process with the client application. It is optimized for analytical workloads, providing high performance for complex queries with minimal setup.
|
||||
|
||||
DuckDB has the following notable characteristics:
|
||||
|
||||
- In-process, serverless database engine
|
||||
- Supports complex SQL queries for analytical processing
|
||||
- Can operate on in-memory or persistent storage
|
||||
- Zero-configuration - no external dependencies or server setup required
|
||||
- Highly optimized for columnar data storage and query execution
|
||||
|
||||
For more details, refer to the [DuckDB Documentation](https://duckdb.org/).
|
||||
|
||||
## Available Tools
|
||||
- [`duckdb-sql`](../tools/duckdb/duckdb-sql.md)
|
||||
Execute pre-defined prepared SQL queries in DuckDB.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database File
|
||||
|
||||
To use DuckDB, you can either:
|
||||
|
||||
- Specify a file path for a persistent database stored on the filesystem
|
||||
- Omit the file path to use an in-memory database
|
||||
|
||||
## Example
|
||||
|
||||
For a persistent DuckDB database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-duckdb:
|
||||
kind: "duckdb"
|
||||
dbFilePath: "/path/to/database.db"
|
||||
configuration:
|
||||
memory_limit: "2GB"
|
||||
threads: "4"
|
||||
```
|
||||
|
||||
For an in-memory DuckDB database:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-duckdb-memory:
|
||||
name: "my-duckdb-memory"
|
||||
kind: "duckdb"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------------|:-----------------:|:------------:|---------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "duckdb". |
|
||||
| dbFilePath | string | false | Path to the DuckDB database file. Omit for an in-memory database. |
|
||||
| configuration | map[string]string | false | Additional DuckDB configuration options (e.g., `memory_limit`, `threads`). |
|
||||
|
||||
For a complete list of available configuration options, refer to the [DuckDB Configuration Documentation](https://duckdb.org/docs/stable/configuration/overview.html#local-configuration-options).
|
||||
|
||||
|
||||
For more details on the Go implementation, see the [go-duckdb package documentation](https://pkg.go.dev/github.com/scottlepp/go-duckdb#section-readme).
|
||||
@@ -33,8 +33,13 @@ with [Firestore][firestore-docs].
|
||||
In addition to [setting the ADC for your server][set-adc], you need to ensure
|
||||
the IAM identity has been given the correct IAM permissions for accessing
|
||||
Firestore. Common roles include:
|
||||
|
||||
- `roles/datastore.user` - Read and write access to Firestore
|
||||
- `roles/datastore.viewer` - Read-only access to Firestore
|
||||
- `roles/firebaserules.admin` - Full management of Firebase Security Rules for
|
||||
Firestore. This role is required for operations that involve creating,
|
||||
updating, or managing Firestore security rules (see [Firebase Security Rules
|
||||
roles][firebaserules-roles])
|
||||
|
||||
See [Firestore access control][firestore-iam] for more information on
|
||||
applying IAM permissions and roles to an identity.
|
||||
@@ -43,6 +48,8 @@ applying IAM permissions and roles to an identity.
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
[firestore-iam]: https://cloud.google.com/firestore/docs/security/iam
|
||||
[firebaserules-roles]:
|
||||
https://cloud.google.com/iam/docs/roles-permissions/firebaserules
|
||||
|
||||
### Database Selection
|
||||
|
||||
|
||||
@@ -21,7 +21,8 @@ in the cloud, on GCP, or on premises.
|
||||
This source only uses API authentication. You will need to
|
||||
[create an API user][looker-user] to login to Looker.
|
||||
|
||||
[looker-user]: https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk
|
||||
[looker-user]:
|
||||
https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk
|
||||
|
||||
## Example
|
||||
|
||||
@@ -36,9 +37,10 @@ sources:
|
||||
timeout: 600s
|
||||
```
|
||||
|
||||
The Looker base url will look like "https://looker.example.com", don't include a
|
||||
trailing "/". In some cases, especially if your Looker is deployed on-premises,
|
||||
you may need to add the API port numner like "https://looker.example.com:19999".
|
||||
The Looker base url will look like "https://looker.example.com", don't include
|
||||
a trailing "/". In some cases, especially if your Looker is deployed
|
||||
on-premises, you may need to add the API port number like
|
||||
"https://looker.example.com:19999".
|
||||
|
||||
Verify ssl should almost always be "true" (all lower case) unless you are using
|
||||
a self-signed ssl certificate for the Looker server. Anything other than "true"
|
||||
|
||||
34
docs/en/resources/sources/mongodb.md
Normal file
@@ -0,0 +1,34 @@
|
||||
---
|
||||
title: "MongoDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
MongoDB is a NoSQL data platform that not only serves general-purpose data requirements but can also perform Vector Search, where both the operational data and the embeddings used for search can reside in the same document.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[MongoDB][mongodb-docs] is a popular NoSQL database that stores data in
|
||||
flexible, JSON-like documents, making it easy to develop and scale applications.
|
||||
|
||||
[mongodb-docs]: https://www.mongodb.com/docs/atlas/getting-started/
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-mongodb:
|
||||
kind: mongodb
|
||||
uri: "mongodb+srv://username:password@host.mongodb.net"
|
||||
database: sample_mflix
|
||||
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|-------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mongodb". |
|
||||
| uri | string | true | connection string to connect to MongoDB |
|
||||
| database | string | true | Name of the mongodb database to connect to (e.g. "sample_mflix"). |
|
||||
@@ -43,6 +43,7 @@ sources:
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
# encrypt: strict
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
@@ -52,11 +53,12 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mssql". |
|
||||
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
|
||||
| port | string | true | Port to connect to (e.g. "1433"). |
|
||||
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
|
||||
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
|
||||
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mssql". |
|
||||
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
|
||||
| port | string | true | Port to connect to (e.g. "1433"). |
|
||||
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
|
||||
| user | string | true | Name of the SQL Server user to connect as (e.g. "my-user"). |
|
||||
| password | string | true | Password of the SQL Server user (e.g. "my-password"). |
|
||||
| encrypt | string | false | Encryption level for data transmitted between the client and server (e.g., "strict"). If not specified, defaults to the [github.com/microsoft/go-mssqldb](https://github.com/microsoft/go-mssqldb?tab=readme-ov-file#common-parameters) package's default encrypt value. |
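As a minimal sketch of the new `encrypt` field in use (reusing the host, port, database, and credential values from the example above; the source name is a placeholder):

```yaml
sources:
  my-mssql-source:
    kind: mssql
    host: 127.0.0.1
    port: "1433"
    database: my_db
    user: ${USER_NAME}
    password: ${PASSWORD}
    encrypt: strict
```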
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
title: "Tools"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Tools define actions an agent can take -- such as reading and writing to a
|
||||
description: >
|
||||
Tools define actions an agent can take -- such as reading and writing to a
|
||||
source.
|
||||
---
|
||||
|
||||
@@ -114,20 +114,24 @@ in the list using the items field:
|
||||
| items | parameter object | true | Specify a Parameter object for the type of the values in the array. |
|
||||
|
||||
{{< notice note >}}
|
||||
Items in array should not have a `default` or `required` value. If provided, it will be ignored.
|
||||
Items in array should not have a `default` or `required` value. If provided, it
|
||||
will be ignored.
|
||||
{{< /notice >}}
|
||||
|
||||
### Map Parameters
|
||||
|
||||
The map type is a collection of key-value pairs. It can be configured in two ways:
|
||||
The map type is a collection of key-value pairs. It can be configured in two
|
||||
ways:
|
||||
|
||||
- Generic Map: By default, it accepts values of any primitive type (string, integer, float, boolean), allowing for mixed data.
|
||||
- Generic Map: By default, it accepts values of any primitive type (string,
|
||||
integer, float, boolean), allowing for mixed data.
|
||||
- Typed Map: By setting the valueType field, you can enforce that all values
|
||||
within the map must be of the same specified type.
|
||||
|
||||
#### Generic Map (Mixed Value Types)
|
||||
|
||||
This is the default behavior when valueType is omitted. It's useful for passing a flexible group of settings.
|
||||
This is the default behavior when valueType is omitted. It's useful for passing
|
||||
a flexible group of settings.
|
||||
|
||||
```yaml
|
||||
parameters:
|
||||
@@ -153,10 +157,10 @@ will be thrown in case of value type mismatch.
|
||||
|
||||
Authenticated parameters are automatically populated with user
|
||||
information decoded from [ID
|
||||
tokens](../authsources/#specifying-id-tokens-from-clients) that are passed in
|
||||
tokens](../authServices/#specifying-id-tokens-from-clients) that are passed in
|
||||
request headers. They do not take input values in request bodies like other
|
||||
parameters. To use authenticated parameters, you must configure the tool to map
|
||||
the required [authServices](../authservices) to specific claims within the
|
||||
the required [authServices](../authServices/) to specific claims within the
|
||||
user's ID token.
|
||||
|
||||
```yaml
|
||||
@@ -179,7 +183,7 @@ user's ID token.
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|-----------------------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the [authServices](../authservices) used to verify the OIDC auth token. |
|
||||
| name | string | true | Name of the [authServices](../authServices/) used to verify the OIDC auth token. |
|
||||
| field | string | true | Claim field decoded from the OIDC token used to auto-populate this parameter. |
|
||||
|
||||
### Template Parameters
|
||||
@@ -240,7 +244,7 @@ tools:
|
||||
|
||||
You can require an authorization check for any Tool invocation request by
|
||||
specifying an `authRequired` field. Specify a list of
|
||||
[authServices](../authservices) defined in the previous section.
|
||||
[authServices](../authServices/) defined in the previous section.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
title: "alloydb-ai-nl"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
The "alloydb-ai-nl" tool leverages
|
||||
[AlloyDB AI](https://cloud.google.com/alloydb/ai) next-generation Natural
|
||||
description: >
|
||||
The "alloydb-ai-nl" tool leverages
|
||||
[AlloyDB AI](https://cloud.google.com/alloydb/ai) next-generation Natural
|
||||
Language support to provide the ability to query the database directly using
|
||||
natural language.
|
||||
aliases:
|
||||
@@ -22,7 +22,7 @@ layer.
|
||||
|
||||
This tool is compatible with the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
|
||||
AlloyDB AI Natural Language delivers secure and accurate responses for
|
||||
application end user natural language questions. Natural language streamlines
|
||||
@@ -69,7 +69,7 @@ database queries.
|
||||
You can use the `nlConfigParameters` to list the parameters required for your
|
||||
`nl_config`. You **must** supply all parameters required for all PSVs in the
|
||||
context. It's strongly recommended to use features like [Authenticated
|
||||
Parameters](../tools/#array-parameters) or Bound Parameters to provide secure
|
||||
Parameters](../#array-parameters) or Bound Parameters to provide secure
|
||||
access to queries generated using natural language, as these parameters are not
|
||||
visible to the LLM.
|
||||
|
||||
@@ -88,8 +88,8 @@ tools:
|
||||
- name: user_email
|
||||
type: string
|
||||
description: User ID of the logged in user.
|
||||
# note: we strongly recommend using features like Authenticated or
|
||||
# Bound parameters to prevent the LLM from seeing these params and
|
||||
# note: we strongly recommend using features like Authenticated or
|
||||
# Bound parameters to prevent the LLM from seeing these params and
|
||||
# specifying values it shouldn't in the tool input
|
||||
authServices:
|
||||
- name: my_google_service
|
||||
@@ -104,4 +104,4 @@ tools:
|
||||
| source | string | true | Name of the AlloyDB source the natural language query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| nlConfig | string | true | The name of the `nl_config` in AlloyDB |
|
||||
| nlConfigParameters | [parameters](_index#specifying-parameters) | true | List of PSV parameters defined in the `nl_config` |
|
||||
| nlConfigParameters | [parameters](../#specifying-parameters) | true | List of PSV parameters defined in the `nl_config` |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "bigquery-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "bigquery-execute-sql" tool executes a SQL statement against BigQuery.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-execute-sql
|
||||
@@ -13,7 +13,7 @@ aliases:
|
||||
A `bigquery-execute-sql` tool executes a SQL statement against BigQuery.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-execute-sql` takes one input parameter `sql` and runs the sql
|
||||
statement against the `source`.
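As a rough sketch (assuming a BigQuery source named `my-bigquery-source`; the tool name is a placeholder), a `tools.yaml` entry might look like this:

```yaml
tools:
  execute_sql_tool:
    kind: bigquery-execute-sql
    source: my-bigquery-source
    description: Use this tool to execute a SQL statement against BigQuery.
```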
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "bigquery-get-dataset-info"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "bigquery-get-dataset-info" tool retrieves metadata for a BigQuery dataset.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-get-dataset-info
|
||||
@@ -13,10 +13,10 @@ aliases:
|
||||
A `bigquery-get-dataset-info` tool retrieves metadata for a BigQuery dataset.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-get-dataset-info` takes a `dataset` parameter to specify the dataset
|
||||
on the given source. It also optionally accepts a `project` parameter to
|
||||
on the given source. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided,
|
||||
the tool defaults to using the project defined in the source configuration.
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "bigquery-get-table-info"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "bigquery-get-table-info" tool retrieves metadata for a BigQuery table.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-get-table-info
|
||||
@@ -13,11 +13,11 @@ aliases:
|
||||
A `bigquery-get-table-info` tool retrieves metadata for a BigQuery table.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
|
||||
the target table. It also optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
the target table. It also optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "bigquery-list-dataset-ids"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "bigquery-list-dataset-ids" tool returns all dataset IDs from the source.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-list-dataset-ids
|
||||
@@ -13,10 +13,10 @@ aliases:
|
||||
A `bigquery-list-dataset-ids` tool returns all dataset IDs from the source.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
|
||||
the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "bigquery-list-table-ids"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "bigquery-list-table-ids" tool returns table IDs in a given BigQuery dataset.
|
||||
aliases:
|
||||
- /resources/tools/bigquery-list-table-ids
|
||||
@@ -13,11 +13,11 @@ aliases:
|
||||
A `bigquery-list-table-ids` tool returns table IDs in a given BigQuery dataset.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
|
||||
from which to list table IDs. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
from which to list table IDs. It also optionally accepts a `project` parameter to
|
||||
define the Google Cloud project ID. If the `project` parameter is not provided, the
|
||||
tool defaults to using the project defined in the source configuration.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -13,7 +13,7 @@ aliases:
|
||||
A `bigquery-sql` tool executes a pre-defined SQL statement. It's compatible with
|
||||
the following sources:
|
||||
|
||||
- [bigquery](../sources/bigquery.md)
|
||||
- [bigquery](../../sources/bigquery.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
@@ -72,7 +72,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](../#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -101,5 +101,5 @@ tools:
|
||||
| source | string | true | Name of the source the GoogleSQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The GoogleSQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](../#template-parameters) | false | List of [templateParameters](../#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
title: "bigtable-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
|
||||
description: >
|
||||
A "bigtable-sql" tool executes a pre-defined SQL statement against a Google
|
||||
Cloud Bigtable instance.
|
||||
aliases:
|
||||
- /resources/tools/bigtable-sql
|
||||
@@ -14,16 +14,20 @@ aliases:
|
||||
A `bigtable-sql` tool executes a pre-defined SQL statement against a Bigtable
|
||||
instance. It's compatible with any of the following sources:
|
||||
|
||||
- [bigtable](../sources/bigtable.md)
|
||||
- [bigtable](../../sources/bigtable.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
Bigtable supports SQL queries. The integration with Toolbox supports `googlesql`
|
||||
dialect, the specified SQL statement is executed as a [data manipulation
|
||||
language (DML)][bigtable-googlesql] statements, and specified parameters will inserted according to their name: e.g. `@name`.
|
||||
language (DML)][bigtable-googlesql] statements, and specified parameters will
|
||||
inserted according to their name: e.g. `@name`.
|
||||
|
||||
{{<notice note>}}
|
||||
Bigtable's GoogleSQL support for DML statements might be limited to certain query types. For detailed information on supported DML statements and use cases, refer to the [Bigtable GoogleSQL use cases](https://cloud.google.com/bigtable/docs/googlesql-overview#use-cases).
|
||||
Bigtable's GoogleSQL support for DML statements might be limited to certain
|
||||
query types. For detailed information on supported DML statements and use
|
||||
cases, refer to the [Bigtable GoogleSQL use
|
||||
cases](https://cloud.google.com/bigtable/docs/googlesql-overview#use-cases).
|
||||
{{</notice>}}
|
||||
|
||||
[bigtable-googlesql]: https://cloud.google.com/bigtable/docs/googlesql-overview
|
||||
@@ -41,13 +45,13 @@ tools:
|
||||
kind: bigtable-sql
|
||||
source: my-bigtable-instance
|
||||
statement: |
|
||||
SELECT
|
||||
TO_INT64(cf[ 'id' ]) as id,
|
||||
CAST(cf[ 'name' ] AS string) as name,
|
||||
FROM
|
||||
mytable
|
||||
WHERE
|
||||
TO_INT64(cf[ 'id' ]) = @id
|
||||
SELECT
|
||||
TO_INT64(cf[ 'id' ]) as id,
|
||||
CAST(cf[ 'name' ] AS string) as name,
|
||||
FROM
|
||||
mytable
|
||||
WHERE
|
||||
TO_INT64(cf[ 'id' ]) = @id
|
||||
OR CAST(cf[ 'name' ] AS string) = @name;
|
||||
description: |
|
||||
Use this tool to get information for a specific user.
|
||||
@@ -73,7 +77,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -102,8 +106,8 @@ tools:
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
## Tips
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "couchbase-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "couchbase-sql" tool executes a pre-defined SQL statement against a Couchbase
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,7 +14,7 @@ aliases:
|
||||
A `couchbase-sql` tool executes a pre-defined SQL statement against a Couchbase
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [couchbase](../sources/couchbase.md)
|
||||
- [couchbase](../../sources/couchbase.md)
|
||||
|
||||
The specified SQL statement is executed as a parameterized statement, and specified
|
||||
parameters will be used according to their name: e.g. `$id`.
|
||||
@@ -66,7 +66,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -95,6 +95,6 @@ tools:
|
||||
| source | string | true | Name of the source the SQL query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| authRequired | array[string] | false | List of auth services that are required to use this tool. |
|
||||
|
||||
7
docs/en/resources/tools/dataplex/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "Dataplex"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Dataplex Sources.
|
||||
---
|
||||
75
docs/en/resources/tools/dataplex/dataplex-search-entries.md
Normal file
@@ -0,0 +1,75 @@
|
||||
---
|
||||
title: "dataplex-search-entries"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "dataplex-search-entries" tool allows to search for entries based on the provided query.
|
||||
aliases:
|
||||
- /resources/tools/dataplex-search-entries
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `dataplex-search-entries` tool returns all entries in Dataplex Catalog (e.g.
|
||||
tables, views, models) that match a given user query.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [dataplex](../../sources/dataplex.md)
|
||||
|
||||
`dataplex-search-entries` takes a required `query` parameter based on which
|
||||
entries are filtered and returned to the user and a required `name` parameter
|
||||
which is constructed using source's project if user does not provide it
|
||||
explicitly and has the following format: projects/{project}/locations/global. It
|
||||
also optionally accepts the following parameters:
|
||||
|
||||
- `pageSize` - Number of results in the search page. Defaults to `5`.
|
||||
- `pageToken` - Page token received from a previous locations.searchEntries
|
||||
call.
|
||||
- `orderBy` - Specifies the ordering of results. Supported values are: relevance
|
||||
(default), last_modified_timestamp, last_modified_timestamp asc
|
||||
- `semanticSearch` - Specifies whether the search should understand the meaning
|
||||
and intent behind the query, rather than just matching keywords. Defaults to
|
||||
`true`.
|
||||
- `scope` - The scope under which the search should be operating. Since this
|
||||
parameter is not exposed to the toolbox user, it defaults to the organization
|
||||
where the project provided in name is located.
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
Dataplex uses [Identity and Access Management (IAM)][iam-overview] to control
|
||||
user and group access to Dataplex resources. Toolbox will use your
|
||||
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
|
||||
interacting with [Dataplex][dataplex-docs].
|
||||
|
||||
In addition to [setting the ADC for your server][set-adc], you need to ensure
|
||||
the IAM identity has been given the correct IAM permissions for the tasks you
|
||||
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
|
||||
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
|
||||
applying IAM permissions and roles to an identity.
|
||||
|
||||
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
|
||||
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
|
||||
[dataplex-docs]: https://cloud.google.com/dataplex
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
dataplex-search-entries:
|
||||
kind: dataplex-search-entries
|
||||
source: my-dataplex-source
|
||||
description: Use this tool to get all the entries based on the provided query.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "dataplex-search-entries". |
|
||||
| source | string | true | Name of the source the tool should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "dgraph-dql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "dgraph-dql" tool executes a pre-defined DQL statement against a Dgraph
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,7 +14,7 @@ aliases:
|
||||
A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [dgraph](../sources/dgraph.md)
|
||||
- [dgraph](../../sources/dgraph.md)
|
||||
|
||||
To run a statement as a query, you need to set the config `isQuery=true`. For
|
||||
upserts or mutations, set `isQuery=false`. You can also configure timeout for a
|
||||
@@ -121,4 +121,4 @@ tools:
|
||||
| statement | string | true | dql statement to execute |
|
||||
| isQuery | boolean | false | To run statement as query set true otherwise false |
|
||||
| timeout | string | false | To set timeout for query |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the dql statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the dql statement. |
|
||||
|
||||
7
docs/en/resources/tools/duckdb/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "DuckDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with DuckDB Sources.
|
||||
---
|
||||
80
docs/en/resources/tools/duckdb/duckdb-sql.md
Normal file
@@ -0,0 +1,80 @@
|
||||
---
|
||||
title: "duckdb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute SQL statements against a DuckDB database using the DuckDB SQL tools configuration.
|
||||
aliases:
|
||||
- /resources/tools/duckdb-sql
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `duckdb-sql` tool executes a pre-defined SQL statement against a [DuckDB](https://duckdb.org/) database. It is compatible with any DuckDB source configuration as defined in the [DuckDB source documentation](../../sources/duckdb.md).
|
||||
|
||||
The specified SQL statement is executed as a prepared statement, and parameters are inserted according to their position: e.g., `$1` is the first parameter, `$2` is the second, and so on. If template parameters are included, they are resolved before execution of the prepared statement.
|
||||
|
||||
DuckDB's SQL dialect closely follows the conventions of the PostgreSQL dialect, with a few exceptions listed in the [DuckDB PostgreSQL Compatibility documentation](https://duckdb.org/docs/stable/sql/dialect/postgresql_compatibility.html). For an introduction to DuckDB's SQL dialect, refer to the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
|
||||
|
||||
### Concepts
|
||||
|
||||
DuckDB is a relational database management system (RDBMS). Data is stored in relations (tables), where each table is a named collection of rows. Each row in a table has the same set of named columns, each with a specific data type. Tables are stored within schemas, and a collection of schemas constitutes the entire database.
|
||||
|
||||
For more details, see the [DuckDB SQL Introduction](https://duckdb.org/docs/stable/sql/introduction).
|
||||
|
||||
## Example
|
||||
|
||||
> **Note:** This tool uses parameterized queries to prevent SQL injections. Query parameters can be used as substitutes for arbitrary expressions but cannot be used for identifiers, column names, table names, or other parts of the query.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
search-users:
|
||||
kind: duckdb-sql
|
||||
source: my-duckdb
|
||||
description: Search users by name and age
|
||||
statement: SELECT * FROM users WHERE name LIKE $1 AND age >= $2
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name to search for
|
||||
- name: min_age
|
||||
type: integer
|
||||
description: Minimum age
|
||||
```
|
||||
|
||||
## Example with Template Parameters
|
||||
|
||||
> **Note:** Template parameters allow direct modifications to the SQL statement, including identifiers, column names, and table names, which makes them more vulnerable to SQL injections. Using basic parameters (see above) is recommended for performance and safety. For more details, see the [templateParameters](../#template-parameters) section.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_table:
|
||||
kind: duckdb-sql
|
||||
source: my-duckdb
|
||||
statement: |
|
||||
SELECT * FROM {{.tableName}};
|
||||
description: |
|
||||
Use this tool to list all information from a specific table.
|
||||
Example:
|
||||
{{
|
||||
"tableName": "flights",
|
||||
}}
|
||||
templateParameters:
|
||||
- name: tableName
|
||||
type: string
|
||||
description: Table to select from
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
### Configuration Fields
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|--------------------|:-------------------------------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "duckdb-sql". |
|
||||
| source | string | true | Name of the DuckDB source configuration (see [DuckDB source documentation](../../sources/duckdb.md)). |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The SQL statement to execute. |
|
||||
| authRequired | []string | false | List of authentication requirements for the tool (if any). |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of parameters that will be inserted into the SQL statement |
|
||||
| templateParameters | [templateParameters](../#template-parameters) | false | List of template parameters that will be inserted into the SQL statement before executing the prepared statement. |
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "firestore-delete-documents"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "firestore-delete-documents" tool deletes multiple documents from Firestore by their paths.
|
||||
aliases:
|
||||
- /resources/tools/firestore-delete-documents
|
||||
@@ -10,14 +10,15 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `firestore-delete-documents` tool deletes multiple documents from Firestore by their paths.
|
||||
A `firestore-delete-documents` tool deletes multiple documents from Firestore by
|
||||
their paths.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [firestore](../sources/firestore.md)
|
||||
- [firestore](../../sources/firestore.md)
|
||||
|
||||
`firestore-delete-documents` takes one input parameter `documentPaths` which is an array of
|
||||
document paths to delete. The tool uses Firestore's BulkWriter for efficient batch deletion
|
||||
and returns the success status for each document.
|
||||
`firestore-delete-documents` takes one input parameter `documentPaths` which is
|
||||
an array of document paths to delete. The tool uses Firestore's BulkWriter for
|
||||
efficient batch deletion and returns the success status for each document.
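
For illustration, a call to this tool could supply a payload like the following; the document paths shown are hypothetical:

```json
{
  "documentPaths": ["users/alice", "orders/2024-001/items/item42"]
}
```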
|
||||
|
||||
## Example
|
||||
|
||||
@@ -31,8 +32,8 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-delete-documents". |
|
||||
| source | string | true | Name of the Firestore source to delete documents from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------------:|:------------:|----------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-delete-documents". |
|
||||
| source | string | true | Name of the Firestore source to delete documents from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "firestore-get-documents"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "firestore-get-documents" tool retrieves multiple documents from Firestore by their paths.
|
||||
aliases:
|
||||
- /resources/tools/firestore-get-documents
|
||||
@@ -10,14 +10,15 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `firestore-get-documents` tool retrieves multiple documents from Firestore by their paths.
|
||||
A `firestore-get-documents` tool retrieves multiple documents from Firestore by
|
||||
their paths.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [firestore](../sources/firestore.md)
|
||||
- [firestore](../../sources/firestore.md)
|
||||
|
||||
`firestore-get-documents` takes one input parameter `documentPaths` which is an array of
|
||||
document paths, and returns the documents' data along with metadata such as existence status,
|
||||
creation time, update time, and read time.
|
||||
`firestore-get-documents` takes one input parameter `documentPaths` which is an
|
||||
array of document paths, and returns the documents' data along with metadata
|
||||
such as existence status, creation time, update time, and read time.
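
As a quick sketch, a request payload for this tool could look like the following (the paths are hypothetical):

```json
{
  "documentPaths": ["users/alice", "users/bob"]
}
```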
|
||||
|
||||
## Example
|
||||
|
||||
@@ -31,8 +32,8 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-get-documents". |
|
||||
| source | string | true | Name of the Firestore source to retrieve documents from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------------:|:------------:|------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-get-documents". |
|
||||
| source | string | true | Name of the Firestore source to retrieve documents from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "firestore-get-rules"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "firestore-get-rules" tool retrieves the active Firestore security rules for the current project.
|
||||
aliases:
|
||||
- /resources/tools/firestore-get-rules
|
||||
@@ -10,13 +10,15 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `firestore-get-rules` tool retrieves the active [Firestore security rules](https://firebase.google.com/docs/firestore/security/get-started) for the current project.
|
||||
A `firestore-get-rules` tool retrieves the active [Firestore security
|
||||
rules](https://firebase.google.com/docs/firestore/security/get-started) for the
|
||||
current project.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [firestore](../sources/firestore.md)
|
||||
- [firestore](../../sources/firestore.md)
|
||||
|
||||
`firestore-get-rules` takes no input parameters and returns the security rules content along with metadata
|
||||
such as the ruleset name, and timestamps.
|
||||
`firestore-get-rules` takes no input parameters and returns the security rules
|
||||
content along with metadata such as the ruleset name, and timestamps.
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,8 +32,8 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-get-rules". |
|
||||
| source | string | true | Name of the Firestore source to retrieve rules from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:-------------:|:------------:|-------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-get-rules". |
|
||||
| source | string | true | Name of the Firestore source to retrieve rules from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "firestore-list-collections"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "firestore-list-collections" tool lists collections in Firestore, either at the root level or as subcollections of a document.
|
||||
aliases:
|
||||
- /resources/tools/firestore-list-collections
|
||||
@@ -10,10 +10,14 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `firestore-list-collections` tool lists [collections](https://firebase.google.com/docs/firestore/data-model#collections) in Firestore, either at the root level or as [subcollections](https://firebase.google.com/docs/firestore/data-model#subcollections) of a specific document.
|
||||
A `firestore-list-collections` tool lists
|
||||
[collections](https://firebase.google.com/docs/firestore/data-model#collections)
|
||||
in Firestore, either at the root level or as
|
||||
[subcollections](https://firebase.google.com/docs/firestore/data-model#subcollections)
|
||||
of a specific document.
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [firestore](../sources/firestore.md)
|
||||
- [firestore](../../sources/firestore.md)
|
||||
|
||||
`firestore-list-collections` takes an optional `parentPath` parameter to specify a document
|
||||
path. If provided, it lists all subcollections of that document. If not provided, it lists
|
||||
@@ -31,8 +35,8 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-list-collections". |
|
||||
| source | string | true | Name of the Firestore source to list collections from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:----------------:|:------------:|--------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore-list-collections". |
|
||||
| source | string | true | Name of the Firestore source to list collections from. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
|
||||
@@ -1,6 +1,17 @@
|
||||
# firestore-query-collection
|
||||
---
|
||||
title: "firestore-query-collection"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "firestore-query-collection" tool allow to query collections in Firestore.
|
||||
aliases:
|
||||
- /resources/tools/firestore-query-collection
|
||||
---
|
||||
|
||||
The `firestore-query-collection` tool allows you to query Firestore collections with filters, ordering, and limit capabilities.
|
||||
## About
|
||||
|
||||
The `firestore-query-collection` tool allows you to query Firestore collections
|
||||
with filters, ordering, and limit capabilities.
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -10,9 +21,8 @@ To use this tool, you need to configure it in your YAML configuration file:
|
||||
sources:
|
||||
my-firestore:
|
||||
kind: firestore
|
||||
config:
|
||||
project: my-gcp-project
|
||||
database: "(default)"
|
||||
project: my-gcp-project
|
||||
database: "(default)"
|
||||
|
||||
tools:
|
||||
query_collection:
|
||||
@@ -23,17 +33,18 @@ tools:
|
||||
|
||||
## Parameters
|
||||
|
||||
| Parameter | Type | Required | Default | Description |
|
||||
|-----------|------|----------|---------|-------------|
|
||||
| `collectionPath` | string | Yes | - | The path to the Firestore collection to query |
|
||||
| `filters` | array | No | - | Array of filter objects (as JSON strings) to apply to the query |
|
||||
| `orderBy` | string | No | - | JSON string specifying field and direction to order results |
|
||||
| `limit` | integer | No | 100 | Maximum number of documents to return |
|
||||
| `analyzeQuery` | boolean | No | false | If true, returns query explain metrics including execution statistics |
|
||||
| **parameters** | **type** | **required** | **default** | **description** |
|
||||
|------------------|:------------:|:------------:|:-----------:|-----------------------------------------------------------------------|
|
||||
| `collectionPath` | string | true | - | The path to the Firestore collection to query |
|
||||
| `filters` | array | false | - | Array of filter objects (as JSON strings) to apply to the query |
|
||||
| `orderBy` | string | false | - | JSON string specifying field and direction to order results |
|
||||
| `limit` | integer | false | 100 | Maximum number of documents to return |
|
||||
| `analyzeQuery` | boolean | false | false | If true, returns query explain metrics including execution statistics |
|
||||
|
||||
### Filter Format
|
||||
|
||||
Each filter in the `filters` array should be a JSON string with the following structure:
|
||||
Each filter in the `filters` array should be a JSON string with the following
|
||||
structure:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -44,6 +55,7 @@ Each filter in the `filters` array should be a JSON string with the following st
|
||||
```
|
||||
|
||||
Supported operators:
|
||||
|
||||
- `<` - Less than
|
||||
- `<=` - Less than or equal to
|
||||
- `>` - Greater than
|
||||
@@ -56,10 +68,12 @@ Supported operators:
|
||||
- `not-in` - Field value is not in the specified array
|
||||
|
||||
Value types supported:
|
||||
|
||||
- String: `"value": "text"`
|
||||
- Number: `"value": 123` or `"value": 45.67`
|
||||
- Boolean: `"value": true` or `"value": false`
|
||||
- Array: `"value": ["item1", "item2"]` (for `in`, `not-in`, `array-contains-any` operators)
|
||||
- Array: `"value": ["item1", "item2"]` (for `in`, `not-in`, `array-contains-any`
|
||||
operators)
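
Putting the operators and value types together, a single entry in the `filters` array might look like the sketch below; the key names are illustrative (see the Filter Format structure above for the exact schema), and the field and value are hypothetical:

```json
{
  "field": "status",
  "op": "==",
  "value": "active"
}
```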
|
||||
|
||||
### OrderBy Format
|
||||
|
||||
@@ -73,6 +87,7 @@ The `orderBy` parameter should be a JSON string with the following structure:
|
||||
```
|
||||
|
||||
Direction values:
|
||||
|
||||
- `ASCENDING`
|
||||
- `DESCENDING`
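
For example, to order results by a hypothetical `price` field from highest to lowest, the `orderBy` JSON string could look like this (key names are illustrative; see the structure above for the exact schema):

```json
{
  "field": "price",
  "direction": "DESCENDING"
}
```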
|
||||
|
||||
@@ -154,7 +169,8 @@ The tool returns an array of documents, where each document includes:
|
||||
|
||||
### Response with Query Analysis (analyzeQuery = true)
|
||||
|
||||
When `analyzeQuery` is set to true, the tool returns a single object containing documents and explain metrics:
|
||||
When `analyzeQuery` is set to true, the tool returns a single object containing
|
||||
documents and explain metrics:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -191,6 +207,7 @@ When `analyzeQuery` is set to true, the tool returns a single object containing
|
||||
## Error Handling
|
||||
|
||||
The tool will return errors for:
|
||||
|
||||
- Invalid collection path
|
||||
- Malformed filter JSON
|
||||
- Unsupported operators
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
---
|
||||
title: firestore-validate-rules
|
||||
weight: 6
|
||||
date: 2025-01-07
|
||||
title: "firestore-validate-rules"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "firestore-validate-rules" tool validates Firestore security rules syntax and semantic correctness without deploying them. It provides detailed error reporting with source positions and code snippets.
|
||||
aliases:
|
||||
- /resources/tools/firestore-validate-rules
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The `firestore-validate-rules` tool validates Firestore security rules syntax and semantic correctness without deploying them. It provides detailed error reporting with source positions and code snippets.
|
||||
The `firestore-validate-rules` tool validates Firestore security rules syntax
|
||||
and semantic correctness without deploying them. It provides detailed error
|
||||
reporting with source positions and code snippets.
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -24,9 +30,9 @@ This tool requires authentication if the source requires authentication.
|
||||
|
||||
## Parameters
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
|-----------|--------|----------|-------------|
|
||||
| source | string | Yes | The Firestore Rules source code to validate |
|
||||
| **parameters** | **type** | **required** | **description** |
|
||||
|-----------------|:------------:|:------------:|----------------------------------------------|
|
||||
| source | string | true | The Firestore Rules source code to validate |
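
As a sketch, a call to this tool passes the entire ruleset as one string in the `source` parameter; the ruleset below is illustrative and deliberately minimal:

```json
{
  "source": "rules_version = '2';\nservice cloud.firestore {\n  match /databases/{database}/documents {\n    allow read, write: if false;\n  }\n}"
}
```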
|
||||
|
||||
## Response
|
||||
|
||||
@@ -34,20 +40,20 @@ The tool returns a `ValidationResult` object containing:
|
||||
|
||||
```json
|
||||
{
|
||||
"valid": boolean, // Whether the rules are valid
|
||||
"issueCount": number, // Number of issues found
|
||||
"formattedIssues": string, // Human-readable formatted issues
|
||||
"rawIssues": [ // Array of raw issue objects
|
||||
"valid": "boolean",
|
||||
"issueCount": "number",
|
||||
"formattedIssues": "string",
|
||||
"rawIssues": [
|
||||
{
|
||||
"sourcePosition": {
|
||||
"fileName": string,
|
||||
"line": number,
|
||||
"column": number,
|
||||
"currentOffset": number,
|
||||
"endOffset": number
|
||||
"fileName": "string",
|
||||
"line": "number",
|
||||
"column": "number",
|
||||
"currentOffset": "number",
|
||||
"endOffset": "number"
|
||||
},
|
||||
"description": string,
|
||||
"severity": string // e.g., "ERROR", "WARNING"
|
||||
"description": "string",
|
||||
"severity": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -98,6 +104,7 @@ The tool returns a `ValidationResult` object containing:
|
||||
## Error Handling
|
||||
|
||||
The tool will return errors for:
|
||||
|
||||
- Missing or empty `source` parameter
|
||||
- API errors when calling the Firebase Rules service
|
||||
- Network connectivity issues
|
||||
@@ -111,5 +118,7 @@ The tool will return errors for:
|
||||
|
||||
## Related Tools
|
||||
|
||||
- [firestore-get-rules]({{< ref "firestore-get-rules" >}}): Retrieve current active rules
|
||||
- [firestore-query-collection]({{< ref "firestore-query-collection" >}}): Test rules by querying collections
|
||||
- [firestore-get-rules]({{< ref "firestore-get-rules" >}}): Retrieve current
|
||||
active rules
|
||||
- [firestore-query-collection]({{< ref "firestore-query-collection" >}}): Test
|
||||
rules by querying collections
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "http"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "http" tool sends out an HTTP request to an HTTP endpoint.
|
||||
aliases:
|
||||
- /resources/tools/http
|
||||
@@ -50,7 +50,7 @@ tools:
|
||||
method: GET
|
||||
path: /search
|
||||
description: Tool to search information from the example API
|
||||
|
||||
|
||||
my-dynamic-path-tool:
|
||||
kind: http
|
||||
source: my-http-source
|
||||
@@ -256,8 +256,8 @@ my-http-tool:
|
||||
| method | string | true | The HTTP method to use (e.g., GET, POST, PUT, DELETE). |
|
||||
| headers | map[string]string | false | A map of headers to include in the HTTP request (overrides source headers). |
|
||||
| requestBody | string | false | The request body payload. Use [go template][go-template-doc] with the parameter name as the placeholder (e.g., `{{.id}}` will be replaced with the value of the parameter that has name `id` in the `bodyParams` section). |
|
||||
| queryParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the query string. |
|
||||
| bodyParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the request body payload. |
|
||||
| headerParams | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted as the request headers. |
|
||||
| queryParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the query string. |
|
||||
| bodyParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the request body payload. |
|
||||
| headerParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted as the request headers. |
|
||||
|
||||
[go-template-doc]: <https://pkg.go.dev/text/template#pkg-overview>
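
To make the `requestBody` templating above concrete: assuming a tool declares hypothetical `bodyParams` named `name` and `age`, a template of `{"name": "{{.name}}", "age": {{.age}}}` invoked with `name="Alice"` and `age=30` would send the following request body:

```json
{"name": "Alice", "age": 30}
```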
|
||||
|
||||
@@ -16,7 +16,7 @@ in a given mode in the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-dimensions` accepts two parameters, the `model` and the `explore`.
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ for a given model from the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-explores` accepts one parameter, the
|
||||
`model` id.
|
||||
|
||||
@@ -16,7 +16,7 @@ in a given mode in the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-filters` accepts two parameters, the `model` and the `explore`.
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ name or description.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-looks` takes four parameters, the `title`, `desc`, `limit`
|
||||
and `offset`.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "looker-get-measures"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "looker-get-measures" tool returns all the measures from a given explore
|
||||
in a given model in the source.
|
||||
aliases:
|
||||
@@ -16,7 +16,7 @@ in a given mode in the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-measures` accepts two parameters, the `model` and the `explore`.
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "looker-get-models"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "looker-get-models" tool returns all the models in the source.
|
||||
aliases:
|
||||
- /resources/tools/looker-get-models
|
||||
@@ -14,7 +14,7 @@ A `looker-get-models` tool returns all the models the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-models` accepts no parameters.
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "looker-get-parameters"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "looker-get-parameters" tool returns all the parameters from a given explore
|
||||
in a given model in the source.
|
||||
aliases:
|
||||
@@ -16,7 +16,7 @@ in a given mode in the source.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-get-parameters` accepts two parameters, the `model` and the `explore`.
|
||||
|
||||
|
||||
@@ -16,9 +16,10 @@ semantic model.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-query` takes eight parameters:
|
||||
|
||||
1. the `model`
|
||||
2. the `explore`
|
||||
3. the `fields` list
|
||||
|
||||
@@ -16,9 +16,10 @@ semantic model.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-query-sql` takes eight parameters:
|
||||
|
||||
1. the `model`
|
||||
2. the `explore`
|
||||
3. the `fields` list
|
||||
|
||||
53
docs/en/resources/tools/looker/looker_query_url.md
Normal file
53
docs/en/resources/tools/looker/looker_query_url.md
Normal file
@@ -0,0 +1,53 @@
|
||||
---
|
||||
title: "looker-query-url"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
"looker-query-url" generates a url link to a Looker explore.
|
||||
aliases:
|
||||
- /resources/tools/looker-query-url
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `looker-query-url` tool generates a URL link to an explore in
|
||||
Looker so the query can be investigated further.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-query-url` takes eight parameters:
|
||||
|
||||
1. the `model`
|
||||
2. the `explore`
|
||||
3. the `fields` list
|
||||
4. an optional set of `filters`
|
||||
5. an optional set of `pivots`
|
||||
6. an optional set of `sorts`
|
||||
7. an optional `limit`
|
||||
8. an optional `tz`
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
query_url:
|
||||
kind: looker-query-url
|
||||
source: looker-source
|
||||
description: |
|
||||
Query URL Tool
|
||||
|
||||
This tool is used to generate the URL of a query in Looker.
|
||||
The user can then explore the query further inside Looker.
|
||||
The tool also returns the query_id and slug. The parameters
|
||||
are the same as the `looker-query` tool.
|
||||
```
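
A call to this tool would then supply the same parameters as `looker-query`; the model, explore, and field names below are hypothetical:

```json
{
  "model": "thelook",
  "explore": "order_items",
  "fields": ["order_items.count", "users.state"]
}
```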
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "looker-query-url" |
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -15,7 +15,7 @@ saved Look.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../sources/looker.md)
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-run-look` takes one parameter, the `look_id`.
|
||||
|
||||
|
||||
7
docs/en/resources/tools/mongodb/_index.md
Normal file
7
docs/en/resources/tools/mongodb/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "MongoDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with the MongoDB Source.
|
||||
---
|
||||
81
docs/en/resources/tools/mongodb/mongodb-aggregate.md
Normal file
81
docs/en/resources/tools/mongodb/mongodb-aggregate.md
Normal file
@@ -0,0 +1,81 @@
|
||||
---
|
||||
title: "mongodb-aggregate"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-aggregate" tool executes a multi-stage aggregation pipeline against a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-aggregate
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mongodb-aggregate` tool is the most powerful query tool for MongoDB,
|
||||
allowing you to process data through a multi-stage pipeline. Each stage
|
||||
transforms the documents as they pass through, enabling complex operations like
|
||||
grouping, filtering, reshaping documents, and performing calculations.
|
||||
|
||||
The core of this tool is the `pipelinePayload`, which must be a string
|
||||
containing a **JSON array of pipeline stage documents**. The tool returns a JSON
|
||||
array of documents produced by the final stage of the pipeline.
|
||||
|
||||
A `readOnly` flag can be set to `true` as a safety measure to ensure the
|
||||
pipeline does not contain any write stages (like `$out` or `$merge`).
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
## Example
|
||||
|
||||
Here is an example that calculates the average price and total count of products
|
||||
for each category, but only for products with an "active" status.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_category_stats:
|
||||
kind: mongodb-aggregate
|
||||
source: my-mongo-source
|
||||
description: Calculates average price and count of products, grouped by category.
|
||||
database: ecommerce
|
||||
collection: products
|
||||
readOnly: true
|
||||
pipelinePayload: |
|
||||
[
|
||||
{
|
||||
"$match": {
|
||||
"status": {{json .status_filter}}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$group": {
|
||||
"_id": "$category",
|
||||
"average_price": { "$avg": "$price" },
|
||||
"item_count": { "$sum": 1 }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$sort": {
|
||||
"average_price": -1
|
||||
}
|
||||
}
|
||||
]
|
||||
pipelineParams:
|
||||
- name: status_filter
|
||||
type: string
|
||||
description: The product status to filter by (e.g., "active").
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:----------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-aggregate`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection to run the aggregation on. |
|
||||
| pipelinePayload | string | true | A JSON array of aggregation stage documents, provided as a string. Uses `{{json .param_name}}` for templating. |
|
||||
| pipelineParams | list | true | A list of parameter objects that define the variables used in the `pipelinePayload`. |
|
||||
| canonical | bool | false | Determines if the pipeline string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
|
||||
| readOnly | bool | false | If `true`, the tool will fail if the pipeline contains write stages (`$out` or `$merge`). Defaults to `false`. |
|
||||
57
docs/en/resources/tools/mongodb/mongodb-delete-many.md
Normal file
57
docs/en/resources/tools/mongodb/mongodb-delete-many.md
Normal file
@@ -0,0 +1,57 @@
|
||||
---
|
||||
title: "mongodb-delete-many"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-delete-many" tool deletes all documents from a MongoDB collection that match a filter.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-delete-many
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mongodb-delete-many` tool performs a **bulk destructive operation**,
|
||||
deleting **ALL** documents from a collection that match a specified filter.
|
||||
|
||||
The tool returns the total count of documents that were deleted. If the filter
|
||||
does not match any documents (i.e., the deleted count is 0), the tool will
|
||||
return an error.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here is an example that performs a cleanup task by deleting all products from
|
||||
the `inventory` collection that belong to a discontinued brand.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
retire_brand_products:
|
||||
kind: mongodb-delete-many
|
||||
source: my-mongo-source
|
||||
description: Deletes all products from a specified discontinued brand.
|
||||
database: ecommerce
|
||||
collection: inventory
|
||||
filterPayload: |
|
||||
{ "brand_name": {{json .brand_to_delete}} }
|
||||
filterParams:
|
||||
- name: brand_to_delete
|
||||
type: string
|
||||
description: The name of the discontinued brand whose products should be deleted.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:--------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-delete-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection from which to delete documents. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the documents for deletion. Uses `{{json .param_name}}` for templating. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
61
docs/en/resources/tools/mongodb/mongodb-delete-one.md
Normal file
61
docs/en/resources/tools/mongodb/mongodb-delete-one.md
Normal file
@@ -0,0 +1,61 @@
|
||||
---
|
||||
title: "mongodb-delete-one"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-delete-one" tool deletes a single document from a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-delete-one
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mongodb-delete-one` tool performs a destructive operation, deleting the
|
||||
**first single document** that matches a specified filter from a MongoDB
|
||||
collection.
|
||||
|
||||
If the filter matches multiple documents, only the first one found by the
|
||||
database will be deleted. This tool is useful for removing specific entries,
|
||||
such as a user account or a single item from an inventory based on a unique ID.
|
||||
|
||||
The tool returns the number of documents deleted, which will be either `1` if a
|
||||
document was found and deleted, or `0` if no matching document was found.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here is an example that deletes a specific user account from the `users`
|
||||
collection by matching their unique email address. This is a permanent action.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
delete_user_account:
|
||||
kind: mongodb-delete-one
|
||||
source: my-mongo-source
|
||||
description: Permanently deletes a user account by their email address.
|
||||
database: user_data
|
||||
collection: users
|
||||
filterPayload: |
|
||||
{ "email": {{json .email_address}} }
|
||||
filterParams:
|
||||
- name: email_address
|
||||
type: string
|
||||
description: The email of the user account to delete.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:-------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-delete-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection from which to delete a document. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the document for deletion. Uses `{{json .param_name}}` for templating. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
68
docs/en/resources/tools/mongodb/mongodb-find-one.md
Normal file
68
docs/en/resources/tools/mongodb/mongodb-find-one.md
Normal file
@@ -0,0 +1,68 @@
|
||||
---
|
||||
title: "mongodb-find-one"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-find-one" tool finds and retrieves a single document from a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-find-one
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mongodb-find-one` tool is used to retrieve the **first single document** that
|
||||
matches a specified filter from a MongoDB collection. If multiple documents
|
||||
match the filter, you can use `sort` options to control which document is
|
||||
returned. Otherwise, the selection is not guaranteed.
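
For instance, a rendered `sortPayload` along the lines of the following (the field name is hypothetical) would make the tool return the most recently created matching document:

```json
{ "created_at": -1 }
```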
|
||||
|
||||
The tool returns a single JSON object representing the document, wrapped in a
|
||||
JSON array.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here's a common use case: finding a specific user by their unique email address
|
||||
and returning their profile information, while excluding sensitive fields like
|
||||
the password hash.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_user_profile:
|
||||
kind: mongodb-find-one
|
||||
source: my-mongo-source
|
||||
description: Retrieves a user's profile by their email address.
|
||||
database: user_data
|
||||
collection: profiles
|
||||
filterPayload: |
|
||||
{ "email": {{json .email}} }
|
||||
filterParams:
|
||||
- name: email
|
||||
type: string
|
||||
description: The email address of the user to find.
|
||||
projectPayload: |
|
||||
{
|
||||
"password_hash": 0,
|
||||
"login_history": 0
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:---------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-find-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database to query. |
|
||||
| collection | string | true | The name of the MongoDB collection to query. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the document. Uses `{{json .param_name}}` for templating. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| projectPayload | string | false | An optional MongoDB projection document to specify which fields to include (1) or exclude (0) in the result. |
|
||||
| projectParams | list | false | A list of parameter objects for the `projectPayload`. |
|
||||
| sortPayload | string | false | An optional MongoDB sort document. Useful for selecting which document to return if the filter matches multiple (e.g., get the most recent). |
|
||||
| sortParams | list | false | A list of parameter objects for the `sortPayload`. |
|
||||
76
docs/en/resources/tools/mongodb/mongodb-find.md
Normal file
76
docs/en/resources/tools/mongodb/mongodb-find.md
Normal file
@@ -0,0 +1,76 @@
|
||||
---
|
||||
title: "mongodb-find"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-find" tool finds and retrieves documents from a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-find
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mongodb-find` tool is used to query a MongoDB collection and retrieve
|
||||
documents that match a specified filter. It's a flexible tool that allows you to
|
||||
shape the output by selecting specific fields (**projection**), ordering the
|
||||
results (**sorting**), and restricting the number of documents returned
|
||||
(**limiting**).
|
||||
|
||||
The tool returns a JSON array of the documents found.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
## Example
|
||||
|
||||
Here's an example that finds up to 10 users from the `customers` collection who
|
||||
live in a specific city. The results are sorted by their last name, and only
|
||||
their first name, last name, and email are returned.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
find_local_customers:
|
||||
kind: mongodb-find
|
||||
source: my-mongo-source
|
||||
description: Finds customers by city, sorted by last name.
|
||||
database: crm
|
||||
collection: customers
|
||||
limit: 10
|
||||
filterPayload: |
|
||||
{ "address.city": {{json .city}} }
|
||||
filterParams:
|
||||
- name: city
|
||||
type: string
|
||||
description: The city to search for customers in.
|
||||
projectPayload: |
|
||||
{
|
||||
"first_name": 1,
|
||||
"last_name": 1,
|
||||
"email": 1,
|
||||
"_id": 0
|
||||
}
|
||||
sortPayload: |
|
||||
{ "last_name": {{json .sort_order}} }
|
||||
sortParams:
|
||||
- name: sort_order
|
||||
type: integer
|
||||
description: The sort order (1 for ascending, -1 for descending).
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:---------------|:---------|:-------------|:----------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-find`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database to query. |
|
||||
| collection | string | true | The name of the MongoDB collection to query. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select which documents to return. Uses `{{json .param_name}}` for templating. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| projectPayload | string | false | An optional MongoDB projection document to specify which fields to include (1) or exclude (0) in the results. |
|
||||
| projectParams | list | false | A list of parameter objects for the `projectPayload`. |
|
||||
| sortPayload | string | false | An optional MongoDB sort document to define the order of the returned documents. Use 1 for ascending and -1 for descending. |
|
||||
| sortParams | list | false | A list of parameter objects for the `sortPayload`. |
|
||||
| limit | integer | false | An optional integer specifying the maximum number of documents to return. |
|
||||
58
docs/en/resources/tools/mongodb/mongodb-insert-many.md
Normal file
58
docs/en/resources/tools/mongodb/mongodb-insert-many.md
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
title: "mongodb-insert-many"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-insert-many" tool inserts multiple new documents into a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-insert-many
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mongodb-insert-many` tool inserts **multiple new documents** into a
|
||||
specified MongoDB collection in a single bulk operation. This is highly
|
||||
efficient for adding large amounts of data at once.
|
||||
|
||||
This tool takes one required parameter named `data`. This `data` parameter must
|
||||
be a string containing a **JSON array of document objects**. Upon successful
|
||||
insertion, the tool returns a JSON array containing the unique `_id` of **each**
|
||||
new document that was created.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here is an example configuration for a tool that logs multiple events at once.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
log_batch_events:
|
||||
kind: mongodb-insert-many
|
||||
source: my-mongo-source
|
||||
description: Inserts a batch of event logs into the database.
|
||||
database: logging
|
||||
collection: events
|
||||
canonical: true
|
||||
```
|
||||
|
||||
An LLM would call this tool by providing an array of documents as a JSON string
|
||||
in the `data` parameter, like this:
|
||||
`tool_code: log_batch_events(data='[{"event": "login", "user": "user1"}, {"event": "click", "user": "user2"}, {"event": "logout", "user": "user1"}]')`
|
||||
|
||||
---
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the documents will be inserted. |
|
||||
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
|
||||
53
docs/en/resources/tools/mongodb/mongodb-insert-one.md
Normal file
53
docs/en/resources/tools/mongodb/mongodb-insert-one.md
Normal file
@@ -0,0 +1,53 @@
|
||||
---
|
||||
title: "mongodb-insert-one"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-insert-one" tool inserts a single new document into a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-insert-one
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `mongodb-insert-one` tool inserts a **single new document** into a specified
|
||||
MongoDB collection.
|
||||
|
||||
This tool takes one required parameter named `data`, which must be a string
|
||||
containing the JSON object you want to insert. Upon successful insertion, the
|
||||
tool returns the unique `_id` of the newly created document.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
## Example
|
||||
|
||||
Here is an example configuration for a tool that adds a new user to a `users`
|
||||
collection.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
create_new_user:
|
||||
kind: mongodb-insert-one
|
||||
source: my-mongo-source
|
||||
description: Creates a new user record in the database.
|
||||
database: user_data
|
||||
collection: users
|
||||
canonical: false
|
||||
```
|
||||
|
||||
An LLM would call this tool by providing the document as a JSON string in the
|
||||
`data` parameter, like this:
|
||||
`tool_code: create_new_user(data='{"email": "new.user@example.com", "name": "Jane Doe", "status": "active"}')`
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-insert-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection into which the document will be inserted. |
|
||||
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
|
||||
72
docs/en/resources/tools/mongodb/mongodb-update-many.md
Normal file
72
docs/en/resources/tools/mongodb/mongodb-update-many.md
Normal file
@@ -0,0 +1,72 @@
|
||||
---
|
||||
title: "mongodb-update-many"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-update-many" tool updates all documents in a MongoDB collection that match a filter.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-update-many
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mongodb-update-many` tool updates **all** documents within a specified
|
||||
MongoDB collection that match a given filter. It locates the documents using a
|
||||
`filterPayload` and applies the modifications defined in an `updatePayload`.
|
||||
|
||||
The tool returns an array of three integers: `[ModifiedCount, UpsertedCount,
|
||||
MatchedCount]`.
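
For example, a run that matched 12 documents and modified all of them, with no upsert, would return:

```json
[12, 0, 12]
```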
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here's an example configuration. This tool applies a discount to all items
|
||||
within a specific category and also marks them as being on sale.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
apply_category_discount:
|
||||
kind: mongodb-update-many
|
||||
source: my-mongo-source
|
||||
description: Use this tool to apply a discount to all items in a given category.
|
||||
database: products
|
||||
collection: inventory
|
||||
filterPayload: |
|
||||
{ "category": {{json .category_name}} }
|
||||
filterParams:
|
||||
- name: category_name
|
||||
type: string
|
||||
description: The category of items to update.
|
||||
updatePayload: |
|
||||
{
|
||||
"$mul": { "price": {{json .discount_multiplier}} },
|
||||
"$set": { "on_sale": true }
|
||||
}
|
||||
updateParams:
|
||||
- name: discount_multiplier
|
||||
type: number
|
||||
description: The multiplier to apply to the price (e.g., 0.8 for a 20% discount).
|
||||
canonical: false
|
||||
upsert: false
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-many`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection in which to update documents. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | true | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
72
docs/en/resources/tools/mongodb/mongodb-update-one.md
Normal file
72
docs/en/resources/tools/mongodb/mongodb-update-one.md
Normal file
@@ -0,0 +1,72 @@
|
||||
---
|
||||
title: "mongodb-update-one"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "mongodb-update-one" tool updates a single document in a MongoDB collection.
|
||||
aliases:
|
||||
- /resources/tools/mongodb-update-one
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `mongodb-update-one` tool updates a single document within a specified MongoDB
|
||||
collection. It locates the document to be updated using a `filterPayload` and
|
||||
applies modifications defined in an `updatePayload`. If the filter matches
|
||||
multiple documents, only the first one found will be updated.
|
||||
|
||||
This tool is compatible with the following source kind:
|
||||
|
||||
* [`mongodb`](../../sources/mongodb.md)
|
||||
|
||||
---
|
||||
|
||||
## Example
|
||||
|
||||
Here's an example of a `mongodb-update-one` tool configuration. This tool
|
||||
updates the `stock` and `status` fields of a document in the `inventory`
|
||||
collection where the `item` field matches a provided value. If no matching
|
||||
document is found, the `upsert: true` option will create a new one.
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
update_inventory_item:
|
||||
kind: mongodb-update-one
|
||||
source: my-mongo-source
|
||||
description: Use this tool to update an item's stock and status in the inventory.
|
||||
database: products
|
||||
collection: inventory
|
||||
filterPayload: |
|
||||
{ "item": {{json .item_name}} }
|
||||
filterParams:
|
||||
- name: item_name
|
||||
type: string
|
||||
description: The name of the item to update.
|
||||
updatePayload: |
|
||||
{ "$set": { "stock": {{json .new_stock}}, "status": {{json .new_status}} } }
|
||||
updateParams:
|
||||
- name: new_stock
|
||||
type: integer
|
||||
description: The new stock quantity.
|
||||
- name: new_status
|
||||
type: string
|
||||
description: The new status of the item (e.g., "In Stock", "Backordered").
|
||||
canonical: false
|
||||
upsert: true
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|:--------------|:---------|:-------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be `mongodb-update-one`. |
|
||||
| source | string | true | The name of the `mongodb` source to use. |
|
||||
| description | string | true | A description of the tool that is passed to the LLM. |
|
||||
| database | string | true | The name of the MongoDB database containing the collection. |
|
||||
| collection | string | true | The name of the MongoDB collection to update a document in. |
|
||||
| filterPayload | string | true | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| filterParams | list | true | A list of parameter objects that define the variables used in the `filterPayload`. |
|
||||
| updatePayload | string | true | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
|
||||
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
|
||||
| canonical | bool | true | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). |
|
||||
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "mssql-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "mssql-execute-sql" tool executes a SQL statement against a SQL Server
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,8 +14,8 @@ aliases:
|
||||
A `mssql-execute-sql` tool executes a SQL statement against a SQL Server
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mssql](../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../sources/mssql.md)
|
||||
- [cloud-sql-mssql](../../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../../sources/mssql.md)
|
||||
|
||||
`mssql-execute-sql` takes one input parameter, `sql`, and runs the SQL
|
||||
statement against the `source`.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "mssql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "mssql-sql" tool executes a pre-defined SQL statement against a SQL Server
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,8 +14,8 @@ aliases:
|
||||
A `mssql-sql` tool executes a pre-defined SQL statement against a SQL Server
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mssql](../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../sources/mssql.md)
|
||||
- [cloud-sql-mssql](../../sources/cloud-sql-mssql.md)
|
||||
- [mssql](../../sources/mssql.md)
|
||||
|
||||
Toolbox supports the [prepare statement syntax][prepare-statement] of MS SQL
|
||||
Server and expects parameters in the SQL query to be in the form of either
|
||||
@@ -78,7 +78,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -107,5 +107,5 @@ tools:
|
||||
| source | string | true | Name of the source the T-SQL statement should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "mysql-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "mysql-execute-sql" tool executes a SQL statement against a MySQL
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,8 +14,8 @@ aliases:
|
||||
A `mysql-execute-sql` tool executes a SQL statement against a MySQL
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mysql](../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../sources/mysql.md)
|
||||
- [cloud-sql-mysql](../../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../../sources/mysql.md)
|
||||
|
||||
`mysql-execute-sql` takes one input parameter `sql` and runs the SQL
|
||||
statement against the `source`.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "mysql-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "mysql-sql" tool executes a pre-defined SQL statement against a MySQL
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,8 +14,8 @@ aliases:
|
||||
A `mysql-sql` tool executes a pre-defined SQL statement against a MySQL
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [cloud-sql-mysql](../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../sources/mysql.md)
|
||||
- [cloud-sql-mysql](../../sources/cloud-sql-mysql.md)
|
||||
- [mysql](../../sources/mysql.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][mysql-prepare],
|
||||
and expects parameters in the SQL query to be in the form of placeholders `?`.
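
For illustration, a minimal `mysql-sql` sketch using the ordered `?` placeholders described above; the tool name, source name, and table are hypothetical placeholders, not definitions from this change:

```yaml
tools:
  # Hypothetical example: "my-mysql-source" must match a source defined elsewhere.
  search_orders_by_status:
    kind: mysql-sql
    source: my-mysql-source
    description: Look up orders with a given status placed after a given date.
    parameters:
      - name: status
        type: string
        description: The order status to filter on.
      - name: placed_after
        type: string
        description: The earliest order date, formatted as YYYY-MM-DD.
    statement: |
      SELECT order_id, status, placed_at
      FROM orders
      WHERE status = ? AND placed_at > ?;
```

Each `?` is bound to the parameters in the order they are listed, so `status` fills the first placeholder and `placed_after` the second.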
|
||||
@@ -73,7 +73,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -102,5 +102,5 @@ tools:
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "neo4j-cypher"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "neo4j-cypher" tool executes a pre-defined cypher statement against a Neo4j
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,7 +14,7 @@ aliases:
|
||||
A `neo4j-cypher` tool executes a pre-defined Cypher statement against a Neo4j
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [neo4j](../sources/neo4j.md)
|
||||
- [neo4j](../../sources/neo4j.md)
|
||||
|
||||
The specified Cypher statement is executed as a [parameterized
|
||||
statement][neo4j-parameters], and specified parameters will be used according to
|
||||
@@ -63,7 +63,7 @@ tools:
|
||||
description: Full actor name, "firstname lastname"
|
||||
- name: year
|
||||
type: integer
|
||||
description: 4 digit number starting in 1900 up to the current year
|
||||
description: 4 digit number starting in 1900 up to the current year
|
||||
```
|
||||
|
||||
## Reference
|
||||
@@ -74,4 +74,4 @@ tools:
|
||||
| source | string | true | Name of the source the Cypher query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | Cypher statement to execute |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be used with the Cypher statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be used with the Cypher statement. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "neo4j-execute-cypher"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "neo4j-execute-cypher" tool executes any arbitrary Cypher statement against a Neo4j
|
||||
database.
|
||||
aliases:
|
||||
@@ -11,15 +11,24 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `neo4j-execute-cypher` tool executes an arbitrary Cypher query provided as a string parameter against a Neo4j database. It's designed to be a flexible tool for interacting with the database when a pre-defined query is not sufficient. This tool is compatible with any of the following sources:
|
||||
A `neo4j-execute-cypher` tool executes an arbitrary Cypher query provided as a
|
||||
string parameter against a Neo4j database. It's designed to be a flexible tool
|
||||
for interacting with the database when a pre-defined query is not sufficient.
|
||||
This tool is compatible with any of the following sources:
|
||||
|
||||
- [neo4j](../sources/neo4j.md)
|
||||
- [neo4j](../../sources/neo4j.md)
|
||||
|
||||
For security, the tool can be configured to be read-only. If the `readOnly` flag is set to `true`, the tool will analyze the incoming Cypher query and reject any write operations (like `CREATE`, `MERGE`, `DELETE`, etc.) before execution.
|
||||
For security, the tool can be configured to be read-only. If the `readOnly` flag
|
||||
is set to `true`, the tool will analyze the incoming Cypher query and reject any
|
||||
write operations (like `CREATE`, `MERGE`, `DELETE`, etc.) before execution.
|
||||
|
||||
The Cypher query uses standard [Neo4j Cypher](https://neo4j.com/docs/cypher-manual/current/queries/) syntax and supports all Cypher features, including pattern matching, filtering, and aggregation.
|
||||
The Cypher query uses standard [Neo4j
|
||||
Cypher](https://neo4j.com/docs/cypher-manual/current/queries/) syntax and
|
||||
supports all Cypher features, including pattern matching, filtering, and
|
||||
aggregation.
|
||||
|
||||
`neo4j-execute-cypher` takes one input parameter `cypher` and run the cypher query against the `source`.
|
||||
`neo4j-execute-cypher` takes one input parameter `cypher` and runs the Cypher
|
||||
query against the `source`.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with
|
||||
> human-in-the-loop and shouldn't be used for production agents.
|
||||
@@ -50,4 +59,3 @@ tools:
|
||||
| source | string | true | Name of the source the Cypher query should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| readOnly | boolean | false | If set to `true`, the tool will reject any write operations in the Cypher query. Default is `false`. |
|
||||
|
||||
|
||||
42 docs/en/resources/tools/neo4j/neo4j-schema.md Normal file
@@ -0,0 +1,42 @@
---
title: "neo4j-schema"
type: "docs"
weight: 1
description: >
  A "neo4j-schema" tool extracts a comprehensive schema from a Neo4j
  database.
aliases:
- /resources/tools/neo4j-schema
---

## About

A `neo4j-schema` tool connects to a Neo4j database and extracts its complete schema information. It runs multiple queries concurrently to efficiently gather details about node labels, relationships, properties, constraints, and indexes.

The tool automatically detects if the APOC (Awesome Procedures on Cypher) library is available. If so, it uses APOC procedures like `apoc.meta.schema` for a highly detailed overview of the database structure; otherwise, it falls back to using native Cypher queries.

The extracted schema is **cached** to improve performance for subsequent requests. The output is a structured JSON object containing all the schema details, which can be invaluable for providing database context to an LLM. This tool is compatible with a `neo4j` source and takes no parameters.

## Example

```yaml
tools:
  get_movie_db_schema:
    kind: neo4j-schema
    source: my-neo4j-movies-instance
    description: |
      Use this tool to get the full schema of the movie database.
      This provides information on all available node labels (like Movie, Person),
      relationships (like ACTED_IN), and the properties on each.
      This tool takes no parameters.
    # Optional configuration to cache the schema for 2 hours
    cacheExpireMinutes: 120
```

## Reference
| **field**          | **type** | **required** | **description** |
|--------------------|:--------:|:------------:|-----------------|
| kind               | string   | true         | Must be `neo4j-schema`. |
| source             | string   | true         | Name of the source the schema should be extracted from. |
| description        | string   | true         | Description of the tool that is passed to the LLM. |
| cacheExpireMinutes | integer  | false        | Cache expiration time in minutes. Defaults to 60. |

@@ -2,7 +2,7 @@
|
||||
title: "postgres-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "postgres-execute-sql" tool executes a SQL statement against a Postgres
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,9 +14,9 @@ aliases:
|
||||
A `postgres-execute-sql` tool executes a SQL statement against a Postgres
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../sources/cloud-sql-pg.md)
|
||||
- [postgres](../sources/postgres.md)
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
`postgres-execute-sql` takes one input parameter `sql` and runs the SQL
|
||||
statement against the `source`.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "postgres-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "postgres-sql" tool executes a pre-defined SQL statement against a Postgres
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,9 +14,9 @@ aliases:
|
||||
A `postgres-sql` tool executes a pre-defined SQL statement against a Postgres
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [alloydb-postgres](../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../sources/cloud-sql-pg.md)
|
||||
- [postgres](../sources/postgres.md)
|
||||
- [alloydb-postgres](../../sources/alloydb-pg.md)
|
||||
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
|
||||
- [postgres](../../sources/postgres.md)
|
||||
|
||||
The specified SQL statement is executed as a [prepared statement][pg-prepare],
|
||||
and specified parameters will be inserted according to their position: e.g. `$1`
|
||||
@@ -77,7 +77,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -106,5 +106,5 @@ tools:
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
title: "spanner-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
description: >
|
||||
A "spanner-execute-sql" tool executes a SQL statement against a Spanner
|
||||
database.
|
||||
aliases:
|
||||
@@ -14,7 +14,7 @@ aliases:
|
||||
A `spanner-execute-sql` tool executes a SQL statement against a Spanner
|
||||
database. It's compatible with any of the following sources:
|
||||
|
||||
- [spanner](../sources/spanner.md)
|
||||
- [spanner](../../sources/spanner.md)
|
||||
|
||||
`spanner-execute-sql` takes one input parameter `sql` and runs the SQL
|
||||
statement against the `source`.
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
title: "spanner-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "spanner-sql" tool executes a pre-defined SQL statement against a Google
|
||||
description: >
|
||||
A "spanner-sql" tool executes a pre-defined SQL statement against a Google
|
||||
Cloud Spanner database.
|
||||
aliases:
|
||||
- /resources/tools/spanner-sql
|
||||
@@ -15,7 +15,7 @@ A `spanner-sql` tool executes a pre-defined SQL statement (either `googlesql` or
|
||||
`postgresql`) against a Cloud Spanner database. It's compatible with any of the
|
||||
following sources:
|
||||
|
||||
- [spanner](../sources/spanner.md)
|
||||
- [spanner](../../sources/spanner.md)
|
||||
|
||||
### GoogleSQL
|
||||
|
||||
@@ -28,7 +28,8 @@ inserted according to their name: e.g. `@name`.
|
||||
> Parameters cannot be used as substitutes for identifiers, column names, table
|
||||
> names, or other parts of the query.
|
||||
|
||||
[gsql-dml]: https://cloud.google.com/spanner/docs/reference/standard-sql/dml-syntax
|
||||
[gsql-dml]:
|
||||
https://cloud.google.com/spanner/docs/reference/standard-sql/dml-syntax
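
As a sketch of the named-parameter style (`@name`) described above; the tool, source, and table names are illustrative, not taken from this change:

```yaml
tools:
  find_customer_by_email:
    kind: spanner-sql
    source: my-spanner-source
    description: Look up a customer record by email address.
    parameters:
      - name: email
        type: string
        description: The customer email to look up.
    statement: |
      SELECT CustomerId, Name, Email
      FROM Customers
      WHERE Email = @email;
```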
|
||||
|
||||
### PostgreSQL
|
||||
|
||||
@@ -133,7 +134,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -162,6 +163,6 @@ tools:
|
||||
| source | string | true | Name of the source the SQL should execute on. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | SQL statement to execute on. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| readOnly | bool | false | When set to `true`, the `statement` is run as a read-only transaction. Default: `false`. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
@@ -13,7 +13,7 @@ aliases:
|
||||
A `sqlite-sql` tool executes SQL statements against a SQLite database.
|
||||
It's compatible with any of the following sources:
|
||||
|
||||
- [sqlite](../sources/sqlite.md)
|
||||
- [sqlite](../../sources/sqlite.md)
|
||||
|
||||
SQLite uses the `?` placeholder for parameters in SQL statements. Parameters are
|
||||
bound in the order they are provided.
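
A minimal sketch of the positional `?` binding described above (all names are placeholders):

```yaml
tools:
  get_user_by_id:
    kind: sqlite-sql
    source: my-sqlite-db
    description: Fetch a single user row by its numeric id.
    parameters:
      - name: user_id
        type: integer
        description: The id of the user to fetch.
    statement: SELECT id, name, email FROM users WHERE id = ?;
```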
|
||||
@@ -51,7 +51,7 @@ tools:
|
||||
> including identifiers, column names, and table names. **This makes it more
|
||||
> vulnerable to SQL injections**. Using basic parameters only (see above) is
|
||||
> recommended for performance and safety reasons. For more details, please check
|
||||
> [templateParameters](_index#template-parameters).
|
||||
> [templateParameters](#template-parameters).
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
@@ -80,5 +80,5 @@ tools:
|
||||
| source | string | true | Name of the SQLite source configuration to use. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| statement | string | true | The SQL statement to execute. |
|
||||
| parameters | [parameters](_index#specifying-parameters) | false | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](_index#template-parameters) | false | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
| parameters | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the SQL statement. |
|
||||
| templateParameters | [templateParameters](#template-parameters) | false | List of [templateParameters](#template-parameters) that will be inserted into the SQL statement before executing prepared statement. |
|
||||
|
||||
7 docs/en/resources/tools/utility/_index.md Normal file
@@ -0,0 +1,7 @@
---
title: "Utility tools"
type: docs
weight: 1
description: >
  Tools that provide utility.
---

@@ -6,10 +6,14 @@ description: >
|
||||
Wait for a long-running AlloyDB operation to complete.
|
||||
---
|
||||
|
||||
The `alloydb-wait-for-operation` tool is a utility tool that waits for a long-running AlloyDB operation to complete. It does this by polling the AlloyDB Admin API operation status endpoint until the operation is finished, using exponential backoff.
|
||||
The `alloydb-wait-for-operation` tool is a utility tool that waits for a
|
||||
long-running AlloyDB operation to complete. It does this by polling the AlloyDB
|
||||
Admin API operation status endpoint until the operation is finished, using
|
||||
exponential backoff.
|
||||
|
||||
{{< notice info >}}
|
||||
This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents.
|
||||
This tool is intended for developer assistant workflows with human-in-the-loop
|
||||
and shouldn't be used for production agents.
|
||||
{{< /notice >}}
|
||||
|
||||
## Example
|
||||
@@ -40,7 +44,7 @@ tools:
|
||||
| ----------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "alloydb-wait-for-operation". |
|
||||
| source | string | true | Name of the source the HTTP request should be sent to. |
|
||||
| description | string | true | A description of the tool. |
|
||||
| description | string | true | A description of the tool. |
|
||||
| delay | duration | false | The initial delay between polling requests (e.g., `3s`). Defaults to 3 seconds. |
|
||||
| maxDelay | duration | false | The maximum delay between polling requests (e.g., `4m`). Defaults to 4 minutes. |
|
||||
| multiplier | float | false | The multiplier for the polling delay. The delay is multiplied by this value after each request. Defaults to 2.0. |
|
||||
|
||||
@@ -10,13 +10,16 @@ aliases:
|
||||
|
||||
## About
|
||||
|
||||
A `wait` tool pauses execution for a specified duration. This can be useful in workflows where a delay is needed between steps.
|
||||
A `wait` tool pauses execution for a specified duration. This can be useful in
|
||||
workflows where a delay is needed between steps.
|
||||
|
||||
`wait` takes one input parameter `duration` which is a string representing the time to wait (e.g., "10s", "2m", "1h").
|
||||
`wait` takes one input parameter `duration` which is a string representing the
|
||||
time to wait (e.g., "10s", "2m", "1h").
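
A minimal configuration sketch; per the reference below, the configured field is `timeout`, which sets the default duration the tool can wait for:

```yaml
tools:
  pause_between_steps:
    kind: wait
    description: Pause the workflow for a caller-supplied duration such as "30s" or "2m".
    timeout: 10m  # default duration the tool can wait for
```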
|
||||
|
||||
{{% notice info %}}
|
||||
This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents.
|
||||
{{% /notice %}}
|
||||
{{< notice info >}}
|
||||
This tool is intended for developer assistant workflows with human-in-the-loop
|
||||
and shouldn't be used for production agents.
|
||||
{{< /notice >}}
|
||||
|
||||
## Example
|
||||
|
||||
@@ -30,8 +33,8 @@ tools:
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "wait". |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| timeout | string | true | The default duration the tool can wait for. |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------------:|:------------:|-------------------------------------------------------|
|
||||
| kind | string | true | Must be "wait". |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
| timeout | string | true | The default duration the tool can wait for. |
|
||||
|
||||
340 docs/en/samples/alloydb/_index.md Normal file
@@ -0,0 +1,340 @@
|
||||
---
|
||||
title: "AlloyDB"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox with MCP Inspector and AlloyDB as the source.
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
|
||||
that standardizes how applications provide context to LLMs. Check out this page
|
||||
on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. [Create an AlloyDB cluster and instance](https://cloud.google.com/alloydb/docs/cluster-create) with a database and user.
|
||||
1. Connect to the instance using [AlloyDB Studio](https://cloud.google.com/alloydb/docs/manage-data-using-studio), [`psql` command-line tool](https://www.postgresql.org/download/), or any other PostgreSQL client.
|
||||
|
||||
2. Enable the `pgvector` and `google_ml_integration` [extensions](https://cloud.google.com/alloydb/docs/ai). These are required for Semantic Search and Natural Language to SQL tools. Run the following SQL commands:
|
||||
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS "vector";
|
||||
CREATE EXTENSION IF NOT EXISTS "google_ml_integration";
|
||||
CREATE EXTENSION IF NOT EXISTS alloydb_ai_nl cascade;
|
||||
CREATE EXTENSION IF NOT EXISTS parameterized_views;
|
||||
```
|
||||
|
||||
## Step 1: Set up your AlloyDB database
|
||||
|
||||
In this section, we will create the necessary tables and functions in your AlloyDB instance.
|
||||
|
||||
1. Create tables using the following commands:
|
||||
|
||||
```sql
|
||||
CREATE TABLE products (
|
||||
product_id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
price DECIMAL(10, 2) NOT NULL,
|
||||
category_id INT,
|
||||
embedding vector(3072) -- Vector size for model(gemini-embedding-001)
|
||||
);
|
||||
|
||||
CREATE TABLE customers (
|
||||
customer_id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
email VARCHAR(255) UNIQUE NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE cart (
|
||||
cart_id SERIAL PRIMARY KEY,
|
||||
customer_id INT UNIQUE NOT NULL,
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(customer_id)
|
||||
);
|
||||
|
||||
CREATE TABLE cart_items (
|
||||
cart_item_id SERIAL PRIMARY KEY,
|
||||
cart_id INT NOT NULL,
|
||||
product_id INT NOT NULL,
|
||||
quantity INT NOT NULL,
|
||||
price DECIMAL(10, 2) NOT NULL,
|
||||
FOREIGN KEY (cart_id) REFERENCES cart(cart_id),
|
||||
FOREIGN KEY (product_id) REFERENCES products(product_id)
|
||||
);
|
||||
|
||||
CREATE TABLE categories (
|
||||
category_id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL
|
||||
);
|
||||
```
|
||||
|
||||
2. Insert sample data into the tables:
|
||||
|
||||
```sql
|
||||
INSERT INTO categories (category_id, name) VALUES
|
||||
(1, 'Flowers'),
|
||||
(2, 'Vases');
|
||||
|
||||
INSERT INTO products (product_id, name, description, price, category_id, embedding) VALUES
|
||||
(1, 'Rose', 'A beautiful red rose', 2.50, 1, embedding('gemini-embedding-001', 'A beautiful red rose')),
|
||||
(2, 'Tulip', 'A colorful tulip', 1.50, 1, embedding('gemini-embedding-001', 'A colorful tulip')),
|
||||
(3, 'Glass Vase', 'A transparent glass vase', 10.00, 2, embedding('gemini-embedding-001', 'A transparent glass vase')),
|
||||
(4, 'Ceramic Vase', 'A handmade ceramic vase', 15.00, 2, embedding('gemini-embedding-001', 'A handmade ceramic vase'));
|
||||
|
||||
INSERT INTO customers (customer_id, name, email) VALUES
|
||||
(1, 'John Doe', 'john.doe@example.com'),
|
||||
(2, 'Jane Smith', 'jane.smith@example.com');
|
||||
|
||||
INSERT INTO cart (cart_id, customer_id) VALUES
|
||||
(1, 1),
|
||||
(2, 2);
|
||||
|
||||
INSERT INTO cart_items (cart_id, product_id, quantity, price) VALUES
|
||||
(1, 1, 2, 2.50),
|
||||
(1, 3, 1, 10.00),
|
||||
(2, 2, 5, 1.50);
|
||||
```
|
||||
|
||||
## Step 2: Install Toolbox
|
||||
|
||||
In this section, we will download and install the Toolbox binary.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.10.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
## Step 3: Configure the tools
|
||||
|
||||
Create a `tools.yaml` file and add the following content. You must replace the placeholders with your actual AlloyDB configuration.
|
||||
|
||||
First, define the data source for your tools. This tells Toolbox how to connect to your AlloyDB instance.
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
alloydb-pg-source:
|
||||
kind: alloydb-postgres
|
||||
project: YOUR_PROJECT_ID
|
||||
region: YOUR_REGION
|
||||
cluster: YOUR_CLUSTER
|
||||
instance: YOUR_INSTANCE
|
||||
database: YOUR_DATABASE
|
||||
user: YOUR_USER
|
||||
password: YOUR_PASSWORD
|
||||
```
|
||||
|
||||
Next, define the tools the agent can use. We will categorize them into three types:
|
||||
|
||||
### 1. Structured Queries Tools
|
||||
|
||||
These tools execute predefined SQL statements. They are ideal for common, structured queries like managing a shopping cart. Add the following to your `tools.yaml` file:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
|
||||
access-cart-information:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: >-
|
||||
List items in customer cart.
|
||||
Use this tool to list items in a customer cart. This tool requires the cart ID.
|
||||
parameters:
|
||||
- name: cart_id
|
||||
type: integer
|
||||
description: The id of the cart.
|
||||
statement: |
|
||||
SELECT
|
||||
p.name AS product_name,
|
||||
ci.quantity,
|
||||
ci.price AS item_price,
|
||||
(ci.quantity * ci.price) AS total_item_price,
|
||||
c.created_at AS cart_created_at,
|
||||
ci.product_id AS product_id
|
||||
FROM
|
||||
cart_items ci JOIN cart c ON ci.cart_id = c.cart_id
|
||||
JOIN products p ON ci.product_id = p.product_id
|
||||
WHERE
|
||||
c.cart_id = $1;
|
||||
|
||||
add-to-cart:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: >-
|
||||
Add items to customer cart using the product ID and product prices from the product list.
|
||||
Use this tool to add items to a customer cart.
|
||||
This tool requires the cart ID, product ID, quantity, and price.
|
||||
parameters:
|
||||
- name: cart_id
|
||||
type: integer
|
||||
description: The id of the cart.
|
||||
- name: product_id
|
||||
type: integer
|
||||
description: The id of the product.
|
||||
- name: quantity
|
||||
type: integer
|
||||
description: The quantity of items to add.
|
||||
- name: price
|
||||
type: float
|
||||
description: The price of items to add.
|
||||
statement: |
|
||||
INSERT INTO
|
||||
cart_items (cart_id, product_id, quantity, price)
|
||||
VALUES($1,$2,$3,$4);
|
||||
|
||||
delete-from-cart:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: >-
|
||||
Remove products from customer cart.
|
||||
Use this tool to remove products from a customer cart.
|
||||
This tool requires the cart ID and product ID.
|
||||
parameters:
|
||||
- name: cart_id
|
||||
type: integer
|
||||
description: The id of the cart.
|
||||
- name: product_id
|
||||
type: integer
|
||||
description: The id of the product.
|
||||
statement: |
|
||||
DELETE FROM
|
||||
cart_items
|
||||
WHERE
|
||||
cart_id = $1 AND product_id = $2;
|
||||
```
|
||||
|
||||
### 2. Semantic Search Tools
|
||||
|
||||
These tools use vector embeddings to find the most relevant results based on the meaning of a user's query, rather than just keywords. Append the following tools to the `tools` section in your `tools.yaml`:
|
||||
|
||||
```yaml
|
||||
search-product-recommendations:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: >-
|
||||
Search for products based on user needs.
|
||||
Use this tool to search for products. This tool requires the user's needs.
|
||||
parameters:
|
||||
- name: query
|
||||
type: string
|
||||
description: The product characteristics
|
||||
statement: |
|
||||
SELECT
|
||||
product_id,
|
||||
name,
|
||||
description,
|
||||
ROUND(CAST(price AS numeric), 2) as price
|
||||
FROM
|
||||
products
|
||||
ORDER BY
|
||||
embedding('gemini-embedding-001', $1)::vector <=> embedding
|
||||
LIMIT 5;
|
||||
```
|
||||
|
||||
### 3. Natural Language to SQL (NL2SQL) Tools
|
||||
|
||||
1. Create a [natural language configuration](https://cloud.google.com/alloydb/docs/ai/use-natural-language-generate-sql-queries#create-config) for your AlloyDB cluster.
|
||||
|
||||
{{< notice tip >}}Before using NL2SQL tools,
|
||||
you must first install the `alloydb_ai_nl` extension and
|
||||
create the [semantic layer](https://cloud.google.com/alloydb/docs/ai/natural-language-overview) under a configuration named `flower_shop`.
|
||||
{{< /notice >}}
|
||||
|
||||
2. Configure your NL2SQL tool to use your configuration. These tools translate natural language questions into SQL queries, allowing users to interact with the database conversationally. Append the following tool to the `tools` section:
|
||||
|
||||
```yaml
|
||||
ask-questions-about-products:
|
||||
kind: alloydb-ai-nl
|
||||
source: alloydb-pg-source
|
||||
nlConfig: flower_shop
|
||||
description: >-
|
||||
Ask questions related to products or brands.
|
||||
Use this tool to ask questions about products or brands.
|
||||
Always SELECT the IDs of objects when generating queries.
|
||||
```
|
||||
|
||||
Finally, group the tools into a `toolset` to make them available to the model. Add the following to the end of your `tools.yaml` file:
|
||||
|
||||
```yaml
|
||||
toolsets:
|
||||
flower_shop:
|
||||
- access-cart-information
|
||||
- search-product-recommendations
|
||||
- ask-questions-about-products
|
||||
- add-to-cart
|
||||
- delete-from-cart
|
||||
```
|
||||
|
||||
For more info on tools, check out the
|
||||
[Tools](../../resources/tools/) section.
|
||||
|
||||
## Step 4: Run the Toolbox server
|
||||
|
||||
Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
## Step 5: Connect to MCP Inspector
|
||||
|
||||
1. Run the MCP Inspector:
|
||||
|
||||
```bash
|
||||
npx @modelcontextprotocol/inspector
|
||||
```
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
|
||||
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
|
||||
|
||||
```bash
|
||||
Starting MCP inspector...
|
||||
⚙️ Proxy server listening on localhost:6277
|
||||
🔑 Session token: <YOUR_SESSION_TOKEN>
|
||||
Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth
|
||||
|
||||
🚀 MCP Inspector is up and running at:
|
||||
http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
|
||||
```
|
||||
|
||||
1. Open the above link in your browser.
|
||||
|
||||
1. For `Transport Type`, select `Streamable HTTP`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
|
||||
|
||||
1. Click Connect.
|
||||
|
||||
1. Select `List Tools`; you will see a list of the tools configured in `tools.yaml`.
|
||||
|
||||
1. Test out your tools here!
|
||||
|
||||
## What's next
|
||||
|
||||
- Learn more about [MCP Inspector](../../how-to/connect_via_mcp.md).
|
||||
- Learn more about [Toolbox Resources](../../resources/).
|
||||
- Learn more about [Toolbox How-to guides](../../how-to/).
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.9.0\" # x-release-please-version\n",
|
||||
"version = \"0.10.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -3,7 +3,7 @@ title: "Quickstart (Local with BigQuery)"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to get started running Toolbox locally with Python, BigQuery, and
|
||||
How to get started running Toolbox locally with Python, BigQuery, and
|
||||
LangGraph, LlamaIndex, or ADK.
|
||||
---
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -292,8 +292,10 @@ to use BigQuery, and then run the Toolbox server.
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
Toolbox enables dynamic reloading by default. To disable, use the
|
||||
`--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
@@ -691,7 +693,7 @@ with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
To learn more about the Core SDK, check out the [Toolbox Core SDK
|
||||
documentation.](https://github.com/googleapis/genai-toolbox/tree/main/sdks/toolbox-core)
|
||||
documentation.](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md)
|
||||
{{% /tab %}}
|
||||
{{% tab header="Langchain" lang="en" %}}
|
||||
To learn more about Agents in LangChain, check out the [LangGraph Agent
|
||||
|
||||
@@ -10,7 +10,7 @@ description: >
|
||||
|
||||
[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
|
||||
that standardizes how applications provide context to LLMs. Check out this page
|
||||
on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).
|
||||
on how to [connect to Toolbox via MCP](../../../how-to/connect_via_mcp.md).
|
||||
|
||||
## Step 1: Set up your BigQuery Dataset and Table
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.10.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -190,7 +190,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
```
|
||||
|
||||
For more info on tools, check out the
|
||||
[Tools](../../resources/tools/_index.md) section.
|
||||
[Tools](../../../resources/tools/) section.
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
@@ -208,7 +208,8 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
|
||||
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
|
||||
1. It should show the following when the MCP Inspector is up and running (please
|
||||
take note of `<YOUR_SESSION_TOKEN>`):
|
||||
|
||||
```bash
|
||||
Starting MCP inspector...
|
||||
@@ -226,7 +227,8 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure
|
||||
`<YOUR_SESSION_TOKEN>` is present.
|
||||
|
||||
1. Click Connect.
|
||||
|
||||
@@ -236,4 +238,4 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||

|
||||
|
||||
1. Test out your tools here!
|
||||
1. Test out your tools here!
|
||||
|
||||
@@ -46,7 +46,8 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
|
||||
1. Edit the file `~/.gemini/settings.json` and add the following
|
||||
to the list of mcpServers. Use the Client Id and Client Secret
|
||||
you obtained earlier.
|
||||
you obtained earlier. The name of the server - here
|
||||
`looker-toolbox` - can be anything meaningful to you.
|
||||
|
||||
```json
|
||||
"mcpServers": {
|
||||
@@ -99,10 +100,13 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
- looker-toolbox__query_sql
|
||||
- looker-toolbox__get_dimensions
|
||||
- looker-toolbox__run_look
|
||||
- looker-toolbox__query_url
|
||||
```
|
||||
|
||||
1. Start exploring your Looker instance with commands like
|
||||
`Find an explore to see orders` or `show me my current
|
||||
inventory broken down by item category`.
|
||||
|
||||
1. Gemini will prompt you for your approval before using
|
||||
a tool. You can approve all the tools.
|
||||
a tool. You can approve all the tools at once or
|
||||
one at a time.
|
||||
|
||||
49 go.mod
@@ -1,6 +1,6 @@
|
||||
module github.com/googleapis/genai-toolbox
|
||||
|
||||
go 1.23.8
|
||||
go 1.24
|
||||
|
||||
toolchain go1.24.5
|
||||
|
||||
@@ -9,6 +9,7 @@ require (
|
||||
cloud.google.com/go/bigquery v1.69.0
|
||||
cloud.google.com/go/bigtable v1.38.0
|
||||
cloud.google.com/go/cloudsqlconn v1.17.3
|
||||
cloud.google.com/go/dataplex v1.26.0
|
||||
cloud.google.com/go/firestore v1.18.0
|
||||
cloud.google.com/go/spanner v1.83.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0
|
||||
@@ -33,6 +34,7 @@ require (
|
||||
github.com/spf13/cobra v1.9.1
|
||||
github.com/thlib/go-timezone-local v0.0.7
|
||||
github.com/valkey-io/valkey-go v1.0.63
|
||||
go.mongodb.org/mongo-driver v1.17.4
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
|
||||
go.opentelemetry.io/otel v1.37.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.37.0
|
||||
@@ -42,14 +44,24 @@ require (
|
||||
go.opentelemetry.io/otel/sdk/metric v1.37.0
|
||||
go.opentelemetry.io/otel/trace v1.37.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
google.golang.org/api v0.243.0
|
||||
modernc.org/sqlite v1.38.0
|
||||
google.golang.org/api v0.244.0
|
||||
modernc.org/sqlite v1.38.2
|
||||
)
|
||||
|
||||
require golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect
|
||||
require (
|
||||
github.com/duckdb/duckdb-go-bindings v0.1.17 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12 // indirect
|
||||
github.com/marcboeker/go-duckdb/arrowmapping v0.0.10 // indirect
|
||||
github.com/marcboeker/go-duckdb/mapping v0.0.11 // indirect
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
cel.dev/expr v0.23.0 // indirect
|
||||
cel.dev/expr v0.24.0 // indirect
|
||||
cloud.google.com/go v0.121.2 // indirect
|
||||
cloud.google.com/go/alloydb v1.18.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.3 // indirect
|
||||
@@ -64,10 +76,11 @@ require (
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
|
||||
github.com/ajg/form v1.5.1 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.4.0 // indirect
|
||||
github.com/apache/arrow/go/v15 v15.0.2 // indirect
|
||||
github.com/cenkalti/backoff/v5 v5.0.2 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
|
||||
github.com/couchbase/gocbcore/v10 v10.7.1 // indirect
|
||||
github.com/couchbase/gocbcoreps v0.1.3 // indirect
|
||||
github.com/couchbase/goprotostellar v1.0.2 // indirect
|
||||
@@ -84,12 +97,13 @@ require (
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/goccy/go-json v0.10.2 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.3.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/flatbuffers v23.5.26+incompatible // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
|
||||
@@ -100,18 +114,23 @@ require (
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
github.com/klauspost/compress v1.16.7 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/marcboeker/go-duckdb/v2 v2.3.4
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.18 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/spf13/pflag v1.0.6 // indirect
|
||||
github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
github.com/xdg-go/scram v1.1.2 // indirect
|
||||
github.com/xdg-go/stringprep v1.0.4 // indirect
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||
github.com/zeebo/errs v1.4.0 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
@@ -139,11 +158,11 @@ require (
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 // indirect
|
||||
google.golang.org/grpc v1.74.2 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
modernc.org/libc v1.65.10 // indirect
|
||||
modernc.org/libc v1.66.3 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
110 go.sum
@@ -1,5 +1,5 @@
|
||||
cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss=
|
||||
cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
|
||||
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
|
||||
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
|
||||
@@ -236,6 +236,8 @@ cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX
|
||||
cloud.google.com/go/dataplex v1.4.0/go.mod h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A=
|
||||
cloud.google.com/go/dataplex v1.5.2/go.mod h1:cVMgQHsmfRoI5KFYq4JtIBEUbYwc3c7tXmIDhRmNNVQ=
|
||||
cloud.google.com/go/dataplex v1.6.0/go.mod h1:bMsomC/aEJOSpHXdFKFGQ1b0TDPIeL28nJObeO1ppRs=
|
||||
cloud.google.com/go/dataplex v1.26.0 h1:nu8/KrLR5v62L1lApGNgm61Oq+xaa2bS9rgc1csjqE0=
|
||||
cloud.google.com/go/dataplex v1.26.0/go.mod h1:12R9nlLUzxOscbb2HgoYnkGNibmv4sXEVMXxrdw2a90=
|
||||
cloud.google.com/go/dataproc v1.7.0/go.mod h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s=
|
||||
cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI=
|
||||
cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4=
|
||||
@@ -670,12 +672,18 @@ github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T
|
||||
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
|
||||
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM=
|
||||
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/apache/arrow-go/v18 v18.4.0 h1:/RvkGqH517iY8bZKc4FD5/kkdwXJGjxf28JIXbJ/oB0=
|
||||
github.com/apache/arrow-go/v18 v18.4.0/go.mod h1:Aawvwhj8x2jURIzD9Moy72cF0FyJXOpkYpdmGRHcw14=
|
||||
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
|
||||
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
|
||||
github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE=
|
||||
github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA=
|
||||
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
|
||||
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
@@ -710,8 +718,8 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH
|
||||
github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k=
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/couchbase/gocb/v2 v2.10.1 h1:5r1jngGxw3dTZdtq6Xmjq3pdU6hOwRvynvbVIp58T64=
|
||||
github.com/couchbase/gocb/v2 v2.10.1/go.mod h1:GGEJuYjrfnPHCQLcxTcIco+Puy63PS2p8QQd8FRw66I=
|
||||
github.com/couchbase/gocbcore/v10 v10.7.1 h1:6jsNDtqyfoQ8Xg6kv99rzccc3CrHbp7FjeY+ahWXTF4=
|
||||
@@ -737,6 +745,18 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8Yc
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
|
||||
github.com/duckdb/duckdb-go-bindings v0.1.17 h1:SjpRwrJ7v0vqnIvLeVFHlhuS72+Lp8xxQ5jIER2LZP4=
|
||||
github.com/duckdb/duckdb-go-bindings v0.1.17/go.mod h1:pBnfviMzANT/9hi4bg+zW4ykRZZPCXlVuvBWEcZofkc=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12 h1:8CLBnsq9YDhi2Gmt3sjSUeXxMzyMQAKefjqUy9zVPFk=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-amd64 v0.1.12/go.mod h1:Ezo7IbAfB8NP7CqPIN8XEHKUg5xdRRQhcPPlCXImXYA=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12 h1:wjO4I0GhMh2xIpiUgRpzuyOT4KxXLoUS/rjU7UUVvCE=
|
||||
github.com/duckdb/duckdb-go-bindings/darwin-arm64 v0.1.12/go.mod h1:eS7m/mLnPQgVF4za1+xTyorKRBuK0/BA44Oy6DgrGXI=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12 h1:HzKQi2C+1jzmwANsPuYH6x9Sfw62SQTjNAEq3OySKFI=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-amd64 v0.1.12/go.mod h1:1GOuk1PixiESxLaCGFhag+oFi7aP+9W8byymRAvunBk=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12 h1:YGSR7AFLw2gJ7IbgLE6DkKYmgKv1LaRSd/ZKF1yh2oE=
|
||||
github.com/duckdb/duckdb-go-bindings/linux-arm64 v0.1.12/go.mod h1:o7crKMpT2eOIi5/FY6HPqaXcvieeLSqdXXaXbruGX7w=
|
||||
github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12 h1:2aduW6fnFnT2Q45PlIgHbatsPOxV9WSZ5B2HzFfxaxA=
|
||||
github.com/duckdb/duckdb-go-bindings/windows-amd64 v0.1.12/go.mod h1:IlOhJdVKUJCAPj3QsDszUo8DVdvp1nBFp4TUJVdw99s=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
@@ -810,9 +830,11 @@ github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAu
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
@@ -863,15 +885,16 @@ github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg=
github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg=
github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@@ -991,13 +1014,14 @@ github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4OxRzU=
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -1015,6 +1039,12 @@ github.com/looker-open-source/sdk-codegen/go v0.25.10/go.mod h1:YM/IYSsTPk7I54j4
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
github.com/marcboeker/go-duckdb/arrowmapping v0.0.10 h1:G1W+GVnUefR8uy7jHdNO+CRMsmFG5mFPIHVAespfFCA=
github.com/marcboeker/go-duckdb/arrowmapping v0.0.10/go.mod h1:jccUb8TYD0p5TsEEeN4SXuslNJHo23QaKOqKD+U6uFU=
github.com/marcboeker/go-duckdb/mapping v0.0.11 h1:fusN1b1l7Myxafifp596I6dNLNhN5Uv/rw31qAqBwqw=
github.com/marcboeker/go-duckdb/mapping v0.0.11/go.mod h1:aYBjFLgfKO0aJIbDtXPiaL5/avRQISveX/j9tMf9JhU=
github.com/marcboeker/go-duckdb/v2 v2.3.4 h1:o98wrefPbH0IdJRix4pF0+jZiXoFQ+FSR8InMsCUZD0=
github.com/marcboeker/go-duckdb/v2 v2.3.4/go.mod h1:8adNrftF4Ye29XMrpIl5NYNosTVsZu1mz3C82WdVvrk=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
@@ -1022,13 +1052,17 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/microsoft/go-mssqldb v1.9.2 h1:nY8TmFMQOHpm2qVWo6y4I2mAmVdZqlGiMGAYt64Ibbs=
github.com/microsoft/go-mssqldb v1.9.2/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/neo4j/neo4j-go-driver/v5 v5.28.1 h1:RKWQW7wTgYAY2fU9S+9LaJ9OwRPbRc0I17tlT7nDmAY=
@@ -1040,8 +1074,8 @@ github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ=
github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -1102,6 +1136,12 @@ github.com/thlib/go-timezone-local v0.0.7 h1:fX8zd3aJydqLlTs/TrROrIIdztzsdFV23Oz
github.com/thlib/go-timezone-local v0.0.7/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI=
github.com/valkey-io/valkey-go v1.0.63 h1:LNlDTcUxy9jxrmGHSvd0s/NsgEmQbvREYvvBAHCIir0=
github.com/valkey-io/valkey-go v1.0.63/go.mod h1:bHmwjIEOrGq/ubOJfh5uMRs7Xj6mV3mQ/ZXUbmqpjqY=
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -1117,6 +1157,8 @@ github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw=
go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@@ -1201,8 +1243,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM=
golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
@@ -1551,8 +1593,8 @@ gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJ
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA=
gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o=
gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
@@ -1614,8 +1656,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/
google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI=
google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0=
google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg=
google.golang.org/api v0.243.0 h1:sw+ESIJ4BVnlJcWu9S+p2Z6Qq1PjG77T8IJ1xtp4jZQ=
google.golang.org/api v0.243.0/go.mod h1:GE4QtYfaybx1KmeHMdBnNnyLzBZCVihGBXAmJu/uUr8=
google.golang.org/api v0.244.0 h1:lpkP8wVibSKr++NCD36XzTk/IzeKJ3klj7vbj+XU5pE=
google.golang.org/api v0.244.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -1760,8 +1802,8 @@ google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuO
google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 h1:1ZwqphdOdWYXsUHgMpU/101nCtf/kSp9hOrcvFsnl10=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -1803,8 +1845,8 @@ google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v
google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw=
google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g=
google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok=
google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc=
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
@@ -1852,8 +1894,8 @@ lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl
modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI=
modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI=
modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI=
modernc.org/cc/v4 v4.26.1 h1:+X5NtzVBn0KgsBCBe+xkDC7twLb/jNVj9FPgiwSQO3s=
modernc.org/cc/v4 v4.26.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/cc/v4 v4.26.2 h1:991HMkLjJzYBIfha6ECZdjrIYz2/1ayr+FL8GN+CNzM=
modernc.org/cc/v4 v4.26.2/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc=
modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw=
modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ=
@@ -1863,10 +1905,12 @@ modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJD
modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU=
modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE=
modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ=
modernc.org/fileutil v1.3.3 h1:3qaU+7f7xxTUmvU1pJTZiDLAIoJVdUSSauJNHg9yXoA=
modernc.org/fileutil v1.3.3/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/fileutil v1.3.8 h1:qtzNm7ED75pd1C7WgAGcK4edm4fvhtBsEiI/0NQ54YM=
modernc.org/fileutil v1.3.8/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA=
modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A=
@@ -1875,8 +1919,8 @@ modernc.org/libc v1.16.17/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU=
modernc.org/libc v1.16.19/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA=
modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0=
modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s=
modernc.org/libc v1.65.10 h1:ZwEk8+jhW7qBjHIT+wd0d9VjitRyQef9BnzlzGwMODc=
modernc.org/libc v1.65.10/go.mod h1:StFvYpx7i/mXtBAfVOjaU0PWZOvIRoZSgXhrwXzr8Po=
modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
@@ -1894,8 +1938,8 @@ modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4=
modernc.org/sqlite v1.38.0 h1:+4OrfPQ8pxHKuWG4md1JpR/EYAh3Md7TdejuuzE7EUI=
modernc.org/sqlite v1.38.0/go.mod h1:1Bj+yES4SVvBZ4cBOpVZ6QgesMCKpJZDq0nxYzOpmNE=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw=
modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
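The entries above add github.com/marcboeker/go-duckdb/v2 (together with the platform-specific github.com/duckdb/duckdb-go-bindings packages) to go.sum. As a minimal sketch only, not code from this change: assuming, per that driver's upstream documentation, that importing it registers a database/sql driver named "duckdb" and that an empty DSN opens an in-memory database, basic usage would look roughly like the following.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/marcboeker/go-duckdb/v2" // assumed to register the "duckdb" driver on import
)

func main() {
	// An empty DSN is assumed here to mean an in-memory DuckDB database.
	db, err := sql.Open("duckdb", "")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var one int
	if err := db.QueryRow("SELECT 1").Scan(&one); err != nil {
		log.Fatal(err)
	}
	fmt.Println(one) // prints 1
}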
@@ -29,8 +29,11 @@ func TestLoadPrebuiltToolYAMLs(t *testing.T) {
		"cloud-sql-mssql",
		"cloud-sql-mysql",
		"cloud-sql-postgres",
		"dataplex",
		"firestore",
		"looker",
		"mssql",
		"mysql",
		"postgres",
		"spanner-postgres",
		"spanner",
@@ -72,7 +75,10 @@ func TestGetPrebuiltTool(t *testing.T) {
	cloudsqlpg_config, _ := Get("cloud-sql-postgres")
	cloudsqlmysql_config, _ := Get("cloud-sql-mysql")
	cloudsqlmssql_config, _ := Get("cloud-sql-mssql")
	dataplex_config, _ := Get("dataplex")
	firestoreconfig, _ := Get("firestore")
	mysql_config, _ := Get("mysql")
	mssql_config, _ := Get("mssql")
	postgresconfig, _ := Get("postgres")
	spanner_config, _ := Get("spanner")
	spannerpg_config, _ := Get("spanner-postgres")
@@ -94,9 +100,18 @@
	if len(cloudsqlmssql_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch cloud sql mssql prebuilt tools yaml")
	}
	if len(dataplex_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch dataplex prebuilt tools yaml")
	}
	if len(firestoreconfig) <= 0 {
		t.Fatalf("unexpected error: could not fetch firestore prebuilt tools yaml")
	}
	if len(mysql_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch mysql prebuilt tools yaml")
	}
	if len(mssql_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch mssql prebuilt tools yaml")
	}
	if len(postgresconfig) <= 0 {
		t.Fatalf("unexpected error: could not fetch postgres prebuilt tools yaml")
	}
15
internal/prebuiltconfigs/tools/dataplex.yaml
Normal file
@@ -0,0 +1,15 @@
sources:
  dataplex-source:
    kind: "dataplex"
    project: ${DATAPLEX_PROJECT}

tools:
  dataplex_search_entries:
    kind: dataplex-search-entries
    source: dataplex-source
    description: |
      Use this tool to search for entries in Dataplex Catalog that represent data assets (e.g. tables, views, models) based on the provided search query.

toolsets:
  dataplex-tools:
    - dataplex_search_entries
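For orientation only, here is a minimal sketch, not part of this change, of how the dataplex.yaml prebuilt config above could be read back. It uses github.com/goccy/go-yaml, which appears in the go.sum additions earlier in this diff; the struct layout and the file path used below are assumptions made for this sketch, not the project's own types.

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/goccy/go-yaml"
)

// prebuiltConfig mirrors the three top-level sections of the file above;
// the field shapes are assumptions for this sketch only.
type prebuiltConfig struct {
	Sources  map[string]map[string]any `yaml:"sources"`
	Tools    map[string]map[string]any `yaml:"tools"`
	Toolsets map[string][]string       `yaml:"toolsets"`
}

func main() {
	raw, err := os.ReadFile("internal/prebuiltconfigs/tools/dataplex.yaml")
	if err != nil {
		log.Fatal(err)
	}
	var cfg prebuiltConfig
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	// Expect the dataplex-tools toolset to list the single search tool.
	fmt.Println(cfg.Toolsets["dataplex-tools"]) // [dataplex_search_entries]
}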
Some files were not shown because too many files have changed in this diff