mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-02-15 01:25:13 -05:00)
Merge branch 'main' into akangsha7-patch-1
This commit is contained in:
@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
-"version = \"0.26.0\" # x-release-please-version\n",
+"version = \"0.27.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

@@ -109,7 +109,7 @@ To install Toolbox as a binary on Linux (AMD64):

```sh
# see releases page for other versions
-export VERSION=0.26.0
+export VERSION=0.27.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -120,7 +120,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

```sh
# see releases page for other versions
-export VERSION=0.26.0
+export VERSION=0.27.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```
@@ -131,7 +131,7 @@ To install Toolbox as a binary on macOS (Intel):

```sh
# see releases page for other versions
-export VERSION=0.26.0
+export VERSION=0.27.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```
@@ -142,7 +142,7 @@ To install Toolbox as a binary on Windows (Command Prompt):

```cmd
:: see releases page for other versions
-set VERSION=0.26.0
+set VERSION=0.27.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```

@@ -152,7 +152,7 @@ To install Toolbox as a binary on Windows (PowerShell):

```powershell
# see releases page for other versions
-$VERSION = "0.26.0"
+$VERSION = "0.27.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```

@@ -164,7 +164,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
-export VERSION=0.26.0
+export VERSION=0.27.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -183,7 +183,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
-go install github.com/googleapis/genai-toolbox@v0.26.0
+go install github.com/googleapis/genai-toolbox@v0.27.0
```

{{% /tab %}}

Binary file not shown. (Before: 241 KiB, after: 271 KiB)
@@ -115,7 +115,7 @@ pip install google-genai

1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
   ```py
-   {{< include "quickstart/python/adk/quickstart.py" >}}
+   {{< regionInclude "quickstart/python/adk/quickstart.py" "quickstart" >}}
   ```
   <br/>

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -18,6 +18,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
||||
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"arrify": "^2.0.0",
|
||||
"extend": "^3.0.2"
|
||||
@@ -31,6 +32,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
|
||||
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
@@ -40,15 +42,17 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
|
||||
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@google-cloud/storage": {
|
||||
"version": "7.18.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
|
||||
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
|
||||
"version": "7.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.19.0.tgz",
|
||||
"integrity": "sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@google-cloud/paginator": "^5.0.0",
|
||||
"@google-cloud/projectify": "^4.0.0",
|
||||
@@ -56,7 +60,7 @@
|
||||
"abort-controller": "^3.0.0",
|
||||
"async-retry": "^1.3.3",
|
||||
"duplexify": "^4.1.3",
|
||||
"fast-xml-parser": "^4.4.1",
|
||||
"fast-xml-parser": "^5.3.4",
|
||||
"gaxios": "^6.0.2",
|
||||
"google-auth-library": "^9.6.3",
|
||||
"html-entities": "^2.5.2",
|
||||
@@ -75,6 +79,7 @@
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
@@ -97,7 +102,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
|
||||
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"google-auth-library": "^9.14.2",
|
||||
"ws": "^8.18.0"
|
||||
@@ -136,7 +140,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
|
||||
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ajv": "^6.12.6",
|
||||
"content-type": "^1.0.5",
|
||||
@@ -299,6 +302,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
|
||||
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
@@ -307,13 +311,15 @@
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
|
||||
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "24.10.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
|
||||
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~7.16.0"
|
||||
}
|
||||
@@ -323,6 +329,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
|
||||
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/caseless": "*",
|
||||
"@types/node": "*",
|
||||
@@ -335,6 +342,7 @@
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
@@ -352,6 +360,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
@@ -361,6 +370,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
@@ -372,13 +382,15 @@
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
||||
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"event-target-shim": "^5.0.0"
|
||||
},
|
||||
@@ -453,6 +465,7 @@
|
||||
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
|
||||
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
@@ -462,6 +475,7 @@
|
||||
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
|
||||
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"retry": "0.13.1"
|
||||
}
|
||||
@@ -754,6 +768,7 @@
|
||||
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
|
||||
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"end-of-stream": "^1.4.1",
|
||||
"inherits": "^2.0.3",
|
||||
@@ -802,6 +817,7 @@
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
|
||||
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
@@ -871,6 +887,7 @@
|
||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
@@ -901,7 +918,6 @@
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
|
||||
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"accepts": "^2.0.0",
|
||||
"body-parser": "^2.2.0",
|
||||
@@ -973,9 +989,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fast-xml-parser": {
|
||||
"version": "4.5.3",
|
||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
|
||||
"integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
|
||||
"version": "5.3.5",
|
||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.5.tgz",
|
||||
"integrity": "sha512-JeaA2Vm9ffQKp9VjvfzObuMCjUYAp5WDYhRYL5LrBPY/jUDlUtOvDfot0vKSkB9tuX885BDHjtw4fZadD95wnA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
@@ -983,8 +999,9 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"strnum": "^1.1.1"
|
||||
"strnum": "^2.1.2"
|
||||
},
|
||||
"bin": {
|
||||
"fxparser": "src/cli/cli.js"
|
||||
@@ -1333,7 +1350,8 @@
|
||||
"url": "https://patreon.com/mdevils"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/http-errors": {
|
||||
"version": "2.0.0",
|
||||
@@ -1365,6 +1383,7 @@
|
||||
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
|
||||
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@tootallnate/once": "2",
|
||||
"agent-base": "6",
|
||||
@@ -1379,6 +1398,7 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -1555,6 +1575,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
|
||||
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
@@ -1715,6 +1736,7 @@
|
||||
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
|
||||
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"yocto-queue": "^0.1.0"
|
||||
},
|
||||
@@ -1856,6 +1878,7 @@
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
@@ -1870,6 +1893,7 @@
|
||||
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
|
||||
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
}
|
||||
@@ -1879,6 +1903,7 @@
|
||||
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
|
||||
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/request": "^2.48.8",
|
||||
"extend": "^3.0.2",
|
||||
@@ -2107,6 +2132,7 @@
|
||||
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
|
||||
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"stubs": "^3.0.0"
|
||||
}
|
||||
@@ -2115,13 +2141,15 @@
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
|
||||
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
@@ -2223,28 +2251,31 @@
|
||||
}
|
||||
},
|
||||
"node_modules/strnum": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
|
||||
"integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz",
|
||||
"integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/stubs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
|
||||
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/teeny-request": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
|
||||
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"http-proxy-agent": "^5.0.0",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
@@ -2261,6 +2292,7 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -2273,6 +2305,7 @@
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
@@ -2314,7 +2347,8 @@
|
||||
"version": "7.16.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
||||
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/unpipe": {
|
||||
"version": "1.0.0",
|
||||
@@ -2338,7 +2372,8 @@
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "9.0.1",
|
||||
@@ -2525,6 +2560,7 @@
|
||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -2537,7 +2573,6 @@
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -3351,13 +3351,13 @@
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"version": "1.13.5",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
|
||||
"integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.4",
|
||||
"follow-redirects": "^1.15.11",
|
||||
"form-data": "^4.0.5",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -4248,9 +4248,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
|
||||
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
|
||||
@@ -18,7 +18,8 @@
|
||||
"node_modules/@cfworker/json-schema": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@google/generative-ai": {
|
||||
"version": "0.24.1",
|
||||
@@ -225,6 +226,7 @@
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
|
||||
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -308,6 +310,7 @@
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
||||
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -420,6 +423,7 @@
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@@ -821,6 +825,7 @@
|
||||
"version": "1.0.21",
|
||||
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
|
||||
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"base64-js": "^1.5.1"
|
||||
}
|
||||
@@ -873,9 +878,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/langsmith": {
|
||||
"version": "0.4.3",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
|
||||
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.5.2.tgz",
|
||||
"integrity": "sha512-CfkcQsiajtTWknAcyItvJsKEQdY2VgDpm6U8pRI9wnM07mevnOv5EF+RcqWGwx37SEUxtyi2RXMwnKW8b06JtA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/uuid": "^10.0.0",
|
||||
@@ -969,6 +974,7 @@
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
||||
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mustache": "bin/mustache"
|
||||
}
|
||||
@@ -1407,7 +1413,6 @@
|
||||
"version": "3.25.76",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,21 @@
# [START quickstart]
import asyncio

from google.adk import Agent
from google.adk.apps import App
from google.adk.runners import InMemoryRunner
from google.adk.tools.toolbox_toolset import ToolboxToolset
from google.genai.types import Content, Part

prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""

# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
toolset = ToolboxToolset(
@@ -8,10 +23,35 @@ toolset = ToolboxToolset(
)

root_agent = Agent(
-    name='root_agent',
+    name='hotel_assistant',
    model='gemini-2.5-flash',
-    instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
+    instruction=prompt,
    tools=[toolset],
)

app = App(root_agent=root_agent, name="my_agent")
# [END quickstart]

queries = [
    "Find hotels in Basel with Basel in its name.",
    "Can you book the Hilton Basel for me?",
    "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
    "My check in dates would be from April 10, 2024 to April 19, 2024.",
]

async def main():
    runner = InMemoryRunner(app=app)
    session = await runner.session_service.create_session(
        app_name=app.name, user_id="test_user"
    )

    for query in queries:
        print(f"\nUser: {query}")
        user_message = Content(parts=[Part.from_text(text=query)])

        async for event in runner.run_async(user_id="test_user", session_id=session.id, new_message=user_message):
            if event.is_final_response() and event.content and event.content.parts:
                print(f"Agent: {event.content.parts[0].text}")

if __name__ == "__main__":
    asyncio.run(main())
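For readers following along, a minimal way to exercise this quickstart locally is sketched below; it assumes the Toolbox server is already running at the `TOOLBOX_URL` configured above, that the `google-adk` package is installed, and that the file is saved as `quickstart.py`:

```bash
# run the hotel-assistant quickstart against a locally running Toolbox server
python quickstart.py
```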
@@ -41,31 +41,29 @@ def golden_keywords():
class TestExecution:
    """Test framework execution and output validation."""

    _cached_output = None

    @pytest.fixture(scope="function")
    def script_output(self, capsys):
        """Run the quickstart function and return its output."""

        # TODO: Add better validation for ADK once we have a way to capture its
        # output.
        if ORCH_NAME == "adk":
            return quickstart.app.root_agent.name
        else:
            if TestExecution._cached_output is None:
                asyncio.run(quickstart.main())

-               return capsys.readouterr()
+               out, err = capsys.readouterr()
+               TestExecution._cached_output = (out, err)

            class Output:
                def __init__(self, out, err):
                    self.out = out
                    self.err = err

            return Output(*TestExecution._cached_output)

    def test_script_runs_without_errors(self, script_output):
        """Test that the script runs and produces no stderr."""
        if ORCH_NAME == "adk":
            return
        assert script_output.err == "", f"Script produced stderr: {script_output.err}"

    def test_keywords_in_output(self, script_output, golden_keywords):
        """Test that expected keywords are present in the script's output."""

        if ORCH_NAME == "adk":
            assert script_output == "root_agent"
            return
        output = script_output.out
        missing_keywords = [kw for kw in golden_keywords if kw not in output]
        assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
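A rough sketch of how this test module might be run locally; the module filename and the use of an `ORCH_NAME` environment variable are assumptions based on the snippet above:

```bash
# exercise only this quickstart test module with the ADK orchestration selected
ORCH_NAME=adk pytest quickstart_test.py -v
```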
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a

{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -32,7 +32,7 @@ to expose your developer assistant tools to a Postgres instance:

{{< notice tip >}}
This guide can be used with [AlloyDB
-Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
+Omni](https://cloud.google.com/alloydb/omni/docs/overview).
{{< /notice >}}

## Set up the database
@@ -40,10 +40,10 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
1. Create or select a PostgreSQL instance.

   * [Install PostgreSQL locally](https://www.postgresql.org/download/)
-   * [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
+   * [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart)

1. Create or reuse [a database
-   user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
+   user](https://docs.cloud.google.com/alloydb/omni/containers/current/docs/database-users/manage-users)
   and have the username and password ready.

## Install MCP Toolbox
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -83,15 +83,12 @@ Toolbox instead of the local address.

2. Open your agent file (`my_agent/agent.py`).

-3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
+3. Update the `ToolboxToolset` initialization to point to your Cloud Run service URL. Replace the existing initialization code with the following:

-   {{% alert color="info" %}}
-   Since Cloud Run services are secured by default, you also need to provide an
-   authentication token.
+   {{% alert color="info" title="Note" %}}
+   Since Cloud Run services are secured by default, you also need to provide a workload identity.
   {{% /alert %}}

-   Replace your existing client initialization code with the following:

   ```python
   from google.adk import Agent
   from google.adk.apps import App
@@ -132,14 +129,14 @@ app = App(root_agent=root_agent, name="my_agent")
Run the deployment command:

```bash
-make backend
+make deploy
```

This command will build your agent's container image and deploy it to Vertex AI.

## Step 6: Test your Deployment

-Once the deployment command (`make backend`) completes, it will output the URL
+Once the deployment command (`make deploy`) completes, it will output the URL
for the Agent Engine Playground. You can click on this URL to open the
Playground in your browser and start chatting with your agent to test the tools.
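As a side note for step 3 above, one way to look up the Cloud Run service URL that `ToolboxToolset` should point to (assuming the Toolbox service was deployed under the name `toolbox` in `us-central1`) is:

```bash
# print the HTTPS URL of the deployed Toolbox Cloud Run service
gcloud run services describe toolbox --region us-central1 --format 'value(status.url)'
```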
docs/en/how-to/generate_skill.md (new file, 112 lines)
@@ -0,0 +1,112 @@
---
title: "Generate Agent Skills"
type: docs
weight: 10
description: >
  How to generate agent skills from a toolset.
---

The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset, complying with the [Agent Skill specification](https://agentskills.io/specification).

## Before you begin

1. Make sure you have the `toolbox` executable in your PATH.
2. Make sure you have [Node.js](https://nodejs.org/) installed on your system.

## Generating a Skill from a Toolset

A skill package consists of a `SKILL.md` file (with required YAML frontmatter) and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding Node.js script (`.js`), and the generated scripts work across platforms (Linux, macOS, Windows).

### Command Usage

The basic syntax for the command is:

```bash
toolbox <tool-source> skills-generate \
  --name <skill-name> \
  --toolset <toolset-name> \
  --description <description> \
  --output-dir <output-directory>
```

- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, or `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
- `--name`: Name of the generated skill.
- `--description`: Description of the generated skill.
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").

{{< notice note >}}
The `<skill-name>` must follow the Agent Skill [naming convention](https://agentskills.io/specification): it must contain only lowercase alphanumeric characters and hyphens, cannot start or end with a hyphen, and cannot contain consecutive hyphens (e.g., `my-skill`, `data-processing`).
{{< /notice >}}

### Example: Custom Tools File

1. Create a `tools.yaml` file with a toolset and some tools:

   ```yaml
   tools:
     tool_a:
       description: "First tool"
       run:
         command: "echo 'Tool A'"
     tool_b:
       description: "Second tool"
       run:
         command: "echo 'Tool B'"
   toolsets:
     my_toolset:
       tools:
         - tool_a
         - tool_b
   ```

2. Generate the skill:

   ```bash
   toolbox --tools-file tools.yaml skills-generate \
     --name "my-skill" \
     --toolset "my_toolset" \
     --description "A skill containing multiple tools" \
     --output-dir "generated-skills"
   ```

3. The generated skill directory structure:

   ```text
   generated-skills/
   └── my-skill/
       ├── SKILL.md
       ├── assets/
       │   ├── tool_a.yaml
       │   └── tool_b.yaml
       └── scripts/
           ├── tool_a.js
           └── tool_b.js
   ```

   In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset.

### Example: Prebuilt Configuration

You can also generate skills from prebuilt toolsets:

```bash
toolbox --prebuilt alloydb-postgres-admin skills-generate \
  --name "alloydb-postgres-admin" \
  --description "skill for performing administrative operations on alloydb"
```

## Installing the Generated Skill in Gemini CLI

Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command.

### Installation Command

Provide the path to the directory containing the generated skill:

```bash
gemini skills install /path/to/generated-skills/my-skill
```

Alternatively, use `~/.gemini/skills` as the `--output-dir` to generate the skill directly into the Gemini CLI's skills directory.
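For example, combining the flags shown earlier to generate a skill directly into that directory:

```bash
# write the generated skill straight into the Gemini CLI skills directory
toolbox --tools-file tools.yaml skills-generate \
  --name "my-skill" \
  --description "A skill containing multiple tools" \
  --output-dir ~/.gemini/skills
```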
@@ -20,14 +20,15 @@ The `invoke` command allows you to invoke tools defined in your configuration di
1. Make sure you have the `toolbox` binary installed or built.
2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`).

-## Basic Usage
+### Command Usage

The basic syntax for the command is:

```bash
-toolbox [--tools-file <path> | --prebuilt <name>] invoke <tool-name> [params]
+toolbox <tool-source> invoke <tool-name> [params]
```

- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, or `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`.
- `[params]`: (Optional) A JSON string representing the arguments for the tool.

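As a concrete illustration, the following invokes a hypothetical tool with a JSON parameter payload; the tool name and parameters must match whatever is defined in your own `tools.yaml`:

```bash
# invoke the (hypothetical) search-hotels-by-name tool with one parameter
toolbox --tools-file tools.yaml invoke search-hotels-by-name '{"name": "Basel"}'
```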
@@ -32,7 +32,8 @@ description: >

## Sub Commands

-### `invoke`
+<details>
+<summary><code>invoke</code></summary>

Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.

@@ -42,8 +43,36 @@ Executes a tool directly with the provided parameters. This is useful for testin
toolbox invoke <tool-name> [params]
```

-- `<tool-name>`: The name of the tool to execute (as defined in your configuration).
-- `[params]`: (Optional) A JSON string containing the parameters for the tool.
+**Arguments:**
+
+- `tool-name`: The name of the tool to execute (as defined in your configuration).
+- `params`: (Optional) A JSON string containing the parameters for the tool.

For more detailed instructions, see [Invoke Tools via CLI](../how-to/invoke_tool.md).

</details>

<details>
<summary><code>skills-generate</code></summary>

Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill.

**Syntax:**

```bash
toolbox skills-generate --name <name> --description <description> --toolset <toolset> --output-dir <output>
```

**Flags:**

- `--name`: Name of the generated skill.
- `--description`: Description of the generated skill.
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").

For more detailed instructions, see [Generate Agent Skills](../how-to/generate_skill.md).

</details>

## Examples

@@ -44,6 +44,12 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_active_queries`: Lists ongoing queries.
|
||||
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
|
||||
* `list_installed_extensions`: List all installed PostgreSQL extensions.
|
||||
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
|
||||
* `list_locks`: Identifies all locks held by active processes.
|
||||
* `replication_stats`: Lists each replica's process ID and sync state.
|
||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
||||
database.
|
||||
* `list_memory_configurations`: Lists memory-related configurations in the
|
||||
@@ -59,12 +65,16 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_query_stats`: Lists query statistics.
|
||||
* `get_column_cardinality`: Gets column cardinality.
|
||||
* `list_table_stats`: Lists table statistics.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
||||
each database in the AlloyDB instance.
|
||||
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
|
||||
* `list_stored_procedure`: Lists stored procedures.
|
||||
|
||||
## AlloyDB Postgres Admin
|
||||
|
||||
@@ -113,6 +123,12 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_active_queries`: Lists ongoing queries.
|
||||
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
|
||||
* `list_installed_extensions`: List all installed PostgreSQL extensions.
|
||||
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
|
||||
* `list_locks`: Identifies all locks held by active processes.
|
||||
* `replication_stats`: Lists each replica's process ID and sync state.
|
||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
||||
database.
|
||||
* `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations.
|
||||
@@ -130,12 +146,16 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_query_stats`: Lists query statistics.
|
||||
* `get_column_cardinality`: Gets column cardinality.
|
||||
* `list_table_stats`: Lists table statistics.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
||||
each database in the AlloyDB instance.
|
||||
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
|
||||
* `list_stored_procedure`: Lists stored procedures.
|
||||
|
||||
## BigQuery
|
||||
|
||||
@@ -173,6 +193,21 @@
* `list_table_ids`: Lists tables.
* `search_catalog`: Search for entries based on the provided query.

## ClickHouse

* `--prebuilt` value: `clickhouse`
* **Environment Variables:**
  * `CLICKHOUSE_HOST`: The hostname or IP address of the ClickHouse server.
  * `CLICKHOUSE_PORT`: The port number of the ClickHouse server.
  * `CLICKHOUSE_USER`: The database username.
  * `CLICKHOUSE_PASSWORD`: The password for the database user.
  * `CLICKHOUSE_DATABASE`: The name of the database to connect to.
  * `CLICKHOUSE_PROTOCOL`: The protocol to use (e.g., http).
* **Tools:**
  * `execute_sql`: Use this tool to execute SQL.
  * `list_databases`: Use this tool to list all databases in ClickHouse.
  * `list_tables`: Use this tool to list all tables in a specific ClickHouse database.

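A minimal way to start Toolbox with this prebuilt configuration might look like the following; the values are placeholders and should be replaced with your own connection details:

```bash
# export the connection settings listed above, then start Toolbox with the
# prebuilt ClickHouse toolset
export CLICKHOUSE_HOST=localhost
export CLICKHOUSE_PORT=8123
export CLICKHOUSE_USER=default
export CLICKHOUSE_PASSWORD=secret
export CLICKHOUSE_DATABASE=default
export CLICKHOUSE_PROTOCOL=http
./toolbox --prebuilt clickhouse
```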
## Cloud SQL for MySQL

* `--prebuilt` value: `cloud-sql-mysql`
@@ -270,6 +305,12 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_active_queries`: Lists ongoing queries.
|
||||
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
|
||||
* `list_installed_extensions`: List all installed PostgreSQL extensions.
|
||||
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
|
||||
* `list_locks`: Identifies all locks held by active processes.
|
||||
* `replication_stats`: Lists each replica's process ID and sync state.
|
||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
||||
database.
|
||||
* `list_memory_configurations`: Lists memory-related configurations in the
|
||||
@@ -285,12 +326,16 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_query_stats`: Lists query statistics.
|
||||
* `get_column_cardinality`: Gets column cardinality.
|
||||
* `list_table_stats`: Lists table statistics.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
||||
each database in the postgreSQL instance.
|
||||
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
|
||||
* `list_stored_procedure`: Lists stored procedures.
|
||||
|
||||
## Cloud SQL for PostgreSQL Observability
|
||||
|
||||
@@ -336,6 +381,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||
* `postgres_upgrade_precheck`: Performs a precheck for a major version upgrade of a Cloud SQL for PostgreSQL instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
||||
|
||||
@@ -420,6 +466,15 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `search_aspect_types`: Finds aspect types relevant to the
|
||||
query.
|
||||
|
||||
## Elasticsearch
|
||||
|
||||
* `--prebuilt` value: `elasticsearch`
|
||||
* **Environment Variables:**
|
||||
* `ELASTICSEARCH_HOST`: The hostname or IP address of the Elasticsearch server.
|
||||
* `ELASTICSEARCH_APIKEY`: The API key for authentication.
|
||||
* **Tools:**
|
||||
* `execute_esql_query`: Use this tool to execute ES|QL queries.
|
||||
|
||||
## Firestore
|
||||
|
||||
* `--prebuilt` value: `firestore`
|
||||
@@ -488,6 +543,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_project_file`: Create a new LookML file.
|
||||
* `update_project_file`: Update an existing LookML file.
|
||||
* `delete_project_file`: Delete a LookML file.
|
||||
* `validate_project`: Check the syntax of a LookML project.
|
||||
* `get_connections`: Get the available connections in a Looker instance.
|
||||
* `get_connection_schemas`: Get the available schemas in a connection.
|
||||
* `get_connection_databases`: Get the available databases in a connection.
|
||||
@@ -536,6 +592,19 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
|
||||
## MindsDB
|
||||
|
||||
* `--prebuilt` value: `mindsdb`
|
||||
* **Environment Variables:**
|
||||
* `MINDSDB_HOST`: The hostname or IP address of the MindsDB server.
|
||||
* `MINDSDB_PORT`: The port number of the MindsDB server.
|
||||
* `MINDSDB_DATABASE`: The name of the database to connect to.
|
||||
* `MINDSDB_USER`: The database username.
|
||||
* `MINDSDB_PASS`: The password for the database user.
|
||||
* **Tools:**
|
||||
* `mindsdb-execute-sql`: Execute SQL queries directly on MindsDB database.
|
||||
* `mindsdb-sql`: Execute parameterized SQL queries on MindsDB database.
|
||||
|
||||
## MySQL
|
||||
|
||||
* `--prebuilt` value: `mysql`
|
||||
@@ -591,6 +660,12 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_active_queries`: Lists ongoing queries.
|
||||
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
|
||||
* `list_installed_extensions`: List all installed PostgreSQL extensions.
|
||||
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
|
||||
* `list_locks`: Identifies all locks held by active processes.
|
||||
* `replication_stats`: Lists each replica's process ID and sync state.
|
||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
||||
database.
|
||||
* `list_memory_configurations`: Lists memory-related configurations in the
|
||||
@@ -606,12 +681,16 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `list_triggers`: Lists triggers in the database.
|
||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
||||
* `list_query_stats`: Lists query statistics.
|
||||
* `get_column_cardinality`: Gets column cardinality.
|
||||
* `list_table_stats`: Lists table statistics.
|
||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
||||
* `list_tablespaces`: Lists tablespaces in the database.
|
||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
||||
each database in the PostgreSQL server.
|
||||
* `list_roles`: Lists all the user-created roles in the PostgreSQL database.
|
||||
* `list_stored_procedure`: Lists stored procedures.
|
||||
|
||||
## Google Cloud Serverless for Apache Spark
|
||||
|
||||
@@ -626,6 +705,38 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
view serverless batches.
|
||||
* **Tools:**
|
||||
* `list_batches`: Lists Spark batches.
|
||||
* `get_batch`: Gets information about a Spark batch.
|
||||
* `cancel_batch`: Cancels a Spark batch.
|
||||
* `create_pyspark_batch`: Creates a PySpark batch.
|
||||
* `create_spark_batch`: Creates a Spark batch.
|
||||
|
||||
## SingleStore
|
||||
|
||||
* `--prebuilt` value: `singlestore`
|
||||
* **Environment Variables:**
|
||||
* `SINGLESTORE_HOST`: The hostname or IP address of the SingleStore server.
|
||||
* `SINGLESTORE_PORT`: The port number of the SingleStore server.
|
||||
* `SINGLESTORE_DATABASE`: The name of the database to connect to.
|
||||
* `SINGLESTORE_USER`: The database username.
|
||||
* `SINGLESTORE_PASSWORD`: The password for the database user.
|
||||
* **Tools:**
|
||||
* `execute_sql`: Use this tool to execute SQL.
|
||||
* `list_tables`: Lists detailed schema information for user-created tables.
|
||||
|
||||
## Snowflake
|
||||
|
||||
* `--prebuilt` value: `snowflake`
|
||||
* **Environment Variables:**
|
||||
* `SNOWFLAKE_ACCOUNT`: The Snowflake account.
|
||||
* `SNOWFLAKE_USER`: The database username.
|
||||
* `SNOWFLAKE_PASSWORD`: The password for the database user.
|
||||
* `SNOWFLAKE_DATABASE`: The name of the database to connect to.
|
||||
* `SNOWFLAKE_SCHEMA`: The schema name.
|
||||
* `SNOWFLAKE_WAREHOUSE`: The warehouse name.
|
||||
* `SNOWFLAKE_ROLE`: The role name.
|
||||
* **Tools:**
|
||||
* `execute_sql`: Use this tool to execute SQL.
|
||||
* `list_tables`: Lists detailed schema information for user-created tables.
|
||||
|
||||
## Spanner (GoogleSQL dialect)
|
||||
|
||||
|
||||
@@ -194,6 +194,15 @@ Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Managed Connection Pooling
|
||||
|
||||
Toolbox automatically supports [Managed Connection Pooling][alloydb-mcp]. If your AlloyDB instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
|
||||
|
||||
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [AlloyDB Managed Connection Pooling documentation][alloydb-mcp-docs].
|
||||
|
||||
[alloydb-mcp]: https://cloud.google.com/blog/products/databases/alloydb-managed-connection-pooling
|
||||
[alloydb-mcp-docs]: https://cloud.google.com/alloydb/docs/configure-managed-connection-pooling
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|
||||
@@ -195,6 +195,15 @@ Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Managed Connection Pooling
|
||||
|
||||
Toolbox automatically supports [Managed Connection Pooling][csql-mcp]. If your Cloud SQL for PostgreSQL instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
|
||||
|
||||
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [Cloud SQL Managed Connection Pooling documentation][csql-mcp-docs].
|
||||
|
||||
[csql-mcp]: https://docs.cloud.google.com/sql/docs/postgres/managed-connection-pooling
|
||||
[csql-mcp-docs]: https://docs.cloud.google.com/sql/docs/postgres/configure-mcp
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|
||||
242
docs/en/resources/sources/cockroachdb.md
Normal file
@@ -0,0 +1,242 @@
|
||||
---
|
||||
title: "CockroachDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
CockroachDB is a distributed SQL database built for cloud applications.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[CockroachDB][crdb-docs] is a distributed SQL database designed for cloud-native applications. It provides strong consistency, horizontal scalability, and built-in resilience with automatic failover and recovery. CockroachDB uses the PostgreSQL wire protocol, making it compatible with many PostgreSQL tools and drivers while providing unique features like multi-region deployments and distributed transactions.
|
||||
|
||||
**Minimum Version:** CockroachDB v25.1 or later is recommended for full tool compatibility.
|
||||
|
||||
[crdb-docs]: https://www.cockroachlabs.com/docs/
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`cockroachdb-sql`](../tools/cockroachdb/cockroachdb-sql.md)
|
||||
  Execute pre-defined, parameterized SQL statements in CockroachDB.
|
||||
|
||||
- [`cockroachdb-execute-sql`](../tools/cockroachdb/cockroachdb-execute-sql.md)
|
||||
  Execute ad-hoc SQL statements provided at runtime.
|
||||
|
||||
- [`cockroachdb-list-schemas`](../tools/cockroachdb/cockroachdb-list-schemas.md)
|
||||
List schemas in a CockroachDB database.
|
||||
|
||||
- [`cockroachdb-list-tables`](../tools/cockroachdb/cockroachdb-list-tables.md)
|
||||
List tables in a CockroachDB database.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
|
||||
This source uses standard authentication. You will need to [create a CockroachDB user][crdb-users] to log in to the database with. For CockroachDB Cloud deployments, SSL/TLS is required.
|
||||
|
||||
[crdb-users]: https://www.cockroachlabs.com/docs/stable/create-user.html
|
||||
|
||||
### SSL/TLS Configuration
|
||||
|
||||
CockroachDB Cloud clusters require SSL/TLS connections. Use the `queryParams` section to configure SSL settings:
|
||||
|
||||
- **For CockroachDB Cloud**: Use `sslmode: require` at minimum
|
||||
- **For self-hosted with certificates**: Use `sslmode: verify-full` with certificate paths
|
||||
- **For local development only**: Use `sslmode: disable` (not recommended for production)
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
maxRetries: 5
|
||||
retryBaseDelay: 500ms
|
||||
queryParams:
|
||||
sslmode: require
|
||||
application_name: my-app
|
||||
|
||||
# MCP Security Settings (recommended for production)
|
||||
readOnlyMode: true # Read-only by default (MCP best practice)
|
||||
enableWriteMode: false # Set to true to allow write operations
|
||||
maxRowLimit: 1000 # Limit query results
|
||||
queryTimeoutSec: 30 # Prevent long-running queries
|
||||
enableTelemetry: true # Enable observability
|
||||
telemetryVerbose: false # Set true for detailed logs
|
||||
clusterID: "my-cluster" # Optional identifier
|
||||
|
||||
tools:
|
||||
list_expenses:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: List all expenses
|
||||
statement: SELECT id, description, amount, category FROM expenses WHERE user_id = $1
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's ID
|
||||
|
||||
describe_expenses:
|
||||
type: cockroachdb-describe-table
|
||||
source: my_cockroachdb
|
||||
description: Describe the expenses table schema
|
||||
|
||||
list_expenses_indexes:
|
||||
type: cockroachdb-list-indexes
|
||||
source: my_cockroachdb
|
||||
description: List indexes on the expenses table
|
||||
```
|
||||
|
||||
## Configuration Parameters
|
||||
|
||||
### Required Parameters
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb` |
|
||||
| `host` | string | The hostname or IP address of the CockroachDB cluster |
|
||||
| `port` | string | The port number (typically "26257") |
|
||||
| `user` | string | The database user name |
|
||||
| `database` | string | The database name to connect to |
|
||||
|
||||
### Optional Parameters
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `password` | string | "" | The database password (can be empty for certificate-based auth) |
|
||||
| `maxRetries` | integer | 5 | Maximum number of connection retry attempts |
|
||||
| `retryBaseDelay` | string | "500ms" | Base delay between retry attempts (exponential backoff) |
|
||||
| `queryParams` | map | {} | Additional connection parameters (e.g., SSL configuration) |
|
||||
|
||||
### MCP Security Parameters
|
||||
|
||||
CockroachDB integration includes security features following the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) specification:
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `readOnlyMode` | boolean | true | Enables read-only mode by default (MCP requirement) |
|
||||
| `enableWriteMode` | boolean | false | Explicitly enable write operations (INSERT/UPDATE/DELETE/CREATE/DROP) |
|
||||
| `maxRowLimit` | integer | 1000 | Maximum rows returned per SELECT query (auto-adds LIMIT clause) |
|
||||
| `queryTimeoutSec` | integer | 30 | Query timeout in seconds to prevent long-running queries |
|
||||
| `enableTelemetry` | boolean | true | Enable structured logging of tool invocations |
|
||||
| `telemetryVerbose` | boolean | false | Enable detailed JSON telemetry output |
|
||||
| `clusterID` | string | "" | Optional cluster identifier for telemetry |
|
||||
|
||||
### Query Parameters
|
||||
|
||||
Common query parameters for CockroachDB connections:
|
||||
|
||||
| Parameter | Values | Description |
|
||||
|-----------|--------|-------------|
|
||||
| `sslmode` | `disable`, `require`, `verify-ca`, `verify-full` | SSL/TLS mode (CockroachDB Cloud requires `require` or higher) |
|
||||
| `sslrootcert` | file path | Path to root certificate for SSL verification |
|
||||
| `sslcert` | file path | Path to client certificate |
|
||||
| `sslkey` | file path | Path to client key |
|
||||
| `application_name` | string | Application name for connection tracking |
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Security and MCP Compliance
|
||||
|
||||
**Read-Only by Default**: The integration follows MCP best practices by defaulting to read-only mode. This prevents accidental data modifications:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
readOnlyMode: true # Default behavior
|
||||
enableWriteMode: false # Explicit write opt-in required
|
||||
```
|
||||
|
||||
To enable write operations:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
readOnlyMode: false # Disable read-only protection
|
||||
enableWriteMode: true # Explicitly allow writes
|
||||
```
|
||||
|
||||
**Query Limits**: Automatic row limits prevent excessive data retrieval:
|
||||
- SELECT queries automatically get `LIMIT 1000` appended (configurable via `maxRowLimit`)
|
||||
- Queries are terminated after 30 seconds (configurable via `queryTimeoutSec`)
|
||||
|
||||
**Observability**: Structured telemetry provides visibility into tool usage:
|
||||
- Tool invocations are logged with status, latency, and row counts
|
||||
- SQL queries are redacted to protect sensitive values
|
||||
- Set `telemetryVerbose: true` for detailed JSON logs
|
||||
|
||||
### Use UUID Primary Keys
|
||||
|
||||
CockroachDB performs best with UUID primary keys rather than sequential integers to avoid transaction hotspots:
|
||||
|
||||
```sql
|
||||
CREATE TABLE expenses (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
description TEXT,
|
||||
amount DECIMAL(10,2)
|
||||
);
|
||||
```
|
||||
|
||||
### Automatic Transaction Retry
|
||||
|
||||
This source uses the official `cockroach-go/v2` library which provides automatic transaction retry for serialization conflicts. For write operations requiring explicit transaction control, tools can use the `ExecuteTxWithRetry` method.
|
||||
|
||||
### Multi-Region Deployments
|
||||
|
||||
CockroachDB supports multi-region deployments with automatic data distribution. Configure your cluster's regions and survival goals separately from the Toolbox configuration. The source will connect to any node in the cluster.
|
||||
|
||||
### Connection Pooling
|
||||
|
||||
The source maintains a connection pool to the CockroachDB cluster. The pool automatically handles:
|
||||
- Load balancing across cluster nodes
|
||||
- Connection retry with exponential backoff
|
||||
- Health checking of connections
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### SSL/TLS Errors
|
||||
|
||||
If you encounter "server requires encryption" errors:
|
||||
|
||||
1. For CockroachDB Cloud, ensure `sslmode` is set to `require` or higher:
|
||||
```yaml
|
||||
queryParams:
|
||||
sslmode: require
|
||||
```
|
||||
|
||||
2. For certificate verification, download your cluster's root certificate and configure:
|
||||
```yaml
|
||||
queryParams:
|
||||
sslmode: verify-full
|
||||
sslrootcert: /path/to/ca.crt
|
||||
```
|
||||
|
||||
### Connection Timeouts
|
||||
|
||||
If experiencing connection timeouts:
|
||||
|
||||
1. Check network connectivity to the CockroachDB cluster
|
||||
2. Verify firewall rules allow connections on port 26257
|
||||
3. For CockroachDB Cloud, ensure IP allowlisting is configured
|
||||
4. Increase `maxRetries` or `retryBaseDelay` if needed
|
||||
|
||||
### Transaction Retry Errors
|
||||
|
||||
CockroachDB may encounter serializable transaction conflicts. The integration automatically handles these retries using the cockroach-go library. If you see retry-related errors, check:
|
||||
|
||||
1. Database load and contention
|
||||
2. Query patterns that might cause conflicts
|
||||
3. Consider using `SELECT FOR UPDATE` for explicit locking
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [CockroachDB Documentation](https://www.cockroachlabs.com/docs/)
|
||||
- [CockroachDB Best Practices](https://www.cockroachlabs.com/docs/stable/performance-best-practices-overview.html)
|
||||
- [Multi-Region Capabilities](https://www.cockroachlabs.com/docs/stable/multiregion-overview.html)
|
||||
- [Connection Parameters](https://www.cockroachlabs.com/docs/stable/connection-parameters.html)
|
||||
273
docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md
Normal file
@@ -0,0 +1,273 @@
|
||||
---
|
||||
title: "cockroachdb-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute ad-hoc SQL statements against a CockroachDB database.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `cockroachdb-execute-sql` tool executes ad-hoc SQL statements against a CockroachDB database. This tool is designed for interactive workflows where the SQL query is provided dynamically at runtime, making it ideal for developer assistance and exploratory data analysis.
|
||||
|
||||
The tool takes a single `sql` parameter containing the SQL statement to execute and returns the query results.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents. For production use cases with predefined queries, use [cockroachdb-sql](./cockroachdb-sql.md) instead.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
type: cockroachdb-execute-sql
|
||||
source: my_cockroachdb
|
||||
description: Execute any SQL statement against the CockroachDB database
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Simple SELECT Query
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT * FROM users LIMIT 10"
|
||||
}
|
||||
```
|
||||
|
||||
### Query with Aggregations
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT category, COUNT(*) as count, SUM(amount) as total FROM expenses GROUP BY category ORDER BY total DESC"
|
||||
}
|
||||
```
|
||||
|
||||
### Database Introspection
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW TABLES"
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW COLUMNS FROM expenses"
|
||||
}
|
||||
```
|
||||
|
||||
### Multi-Region Information
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW REGIONS FROM DATABASE defaultdb"
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW ZONE CONFIGURATIONS"
|
||||
}
|
||||
```
|
||||
|
||||
## CockroachDB-Specific Features
|
||||
|
||||
### Check Cluster Version
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT version()"
|
||||
}
|
||||
```
|
||||
|
||||
### View Node Status
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT node_id, address, locality, is_live FROM crdb_internal.gossip_nodes"
|
||||
}
|
||||
```
|
||||
|
||||
### Check Replication Status
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT range_id, start_key, end_key, replicas, lease_holder FROM crdb_internal.ranges LIMIT 10"
|
||||
}
|
||||
```
|
||||
|
||||
### View Table Regions
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW REGIONS FROM TABLE expenses"
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-execute-sql` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts a single runtime parameter:
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `sql` | string | The SQL statement to execute |
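
The following is a minimal sketch of how that single parameter is supplied at invocation time. It assumes the `execute_sql` tool from the example above is served by a local Toolbox instance and that the Toolbox Python SDK (`toolbox_core`) is installed; adjust the client and URL to your environment.

```python
import asyncio

from toolbox_core import ToolboxClient  # assumed Toolbox Python SDK import

async def main():
    # Assumes a Toolbox server running locally with the "execute_sql" tool
    # from the example configuration above.
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        execute_sql = await client.load_tool("execute_sql")
        # The single "sql" parameter carries the statement to run.
        result = await execute_sql(
            sql="SELECT category, COUNT(*) FROM expenses GROUP BY category"
        )
        print(result)

asyncio.run(main())
```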
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use for Exploration, Not Production
|
||||
|
||||
This tool is ideal for:
|
||||
- Interactive database exploration
|
||||
- Ad-hoc analysis and reporting
|
||||
- Debugging and troubleshooting
|
||||
- Schema inspection
|
||||
|
||||
For production use cases, use [cockroachdb-sql](./cockroachdb-sql.md) with parameterized queries.
|
||||
|
||||
### Be Cautious with Data Modification
|
||||
|
||||
While this tool can execute any SQL statement, be careful with:
|
||||
- `INSERT`, `UPDATE`, `DELETE` statements
|
||||
- `DROP` or `ALTER` statements
|
||||
- Schema changes in production
|
||||
|
||||
### Use LIMIT for Large Results
|
||||
|
||||
Always use `LIMIT` clauses when exploring data:
|
||||
|
||||
```sql
|
||||
SELECT * FROM large_table LIMIT 100
|
||||
```
|
||||
|
||||
### Leverage CockroachDB's SQL Extensions
|
||||
|
||||
CockroachDB supports PostgreSQL syntax plus extensions:
|
||||
|
||||
```sql
|
||||
-- Show database survival goal
|
||||
SHOW SURVIVAL GOAL FROM DATABASE defaultdb;
|
||||
|
||||
-- View zone configurations
|
||||
SHOW ZONE CONFIGURATION FOR TABLE expenses;
|
||||
|
||||
-- Check table localities
|
||||
SHOW CREATE TABLE expenses;
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool will return descriptive errors for:
|
||||
- **Syntax errors**: Invalid SQL syntax
|
||||
- **Permission errors**: Insufficient user privileges
|
||||
- **Connection errors**: Network or authentication issues
|
||||
- **Runtime errors**: Constraint violations, type mismatches, etc.
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### SQL Injection Risk
|
||||
|
||||
Since this tool executes arbitrary SQL, it should only be used with:
|
||||
- Trusted users in interactive sessions
|
||||
- Human-in-the-loop workflows
|
||||
- Development and testing environments
|
||||
|
||||
Never expose this tool directly to end users without proper authorization controls.
|
||||
|
||||
### Use Authentication
|
||||
|
||||
Configure the `authRequired` field to restrict access:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
execute_sql:
|
||||
type: cockroachdb-execute-sql
|
||||
source: my_cockroachdb
|
||||
description: Execute SQL statements
|
||||
authRequired:
|
||||
- my-auth-service
|
||||
```
|
||||
|
||||
### Read-Only Users
|
||||
|
||||
For safer exploration, create read-only database users:
|
||||
|
||||
```sql
|
||||
CREATE USER readonly_user;
|
||||
GRANT SELECT ON DATABASE defaultdb TO readonly_user;
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Database Administration
|
||||
|
||||
```sql
|
||||
-- View database size
|
||||
SELECT
|
||||
table_name,
|
||||
pg_size_pretty(pg_total_relation_size(table_name::regclass)) AS size
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
ORDER BY pg_total_relation_size(table_name::regclass) DESC;
|
||||
```
|
||||
|
||||
### Performance Analysis
|
||||
|
||||
```sql
|
||||
-- Find slow queries
|
||||
SELECT query, count, mean_latency
|
||||
FROM crdb_internal.statement_statistics
|
||||
WHERE mean_latency > INTERVAL '1 second'
|
||||
ORDER BY mean_latency DESC
|
||||
LIMIT 10;
|
||||
```
|
||||
|
||||
### Data Quality Checks
|
||||
|
||||
```sql
|
||||
-- Find NULL values
|
||||
SELECT COUNT(*) as null_count
|
||||
FROM expenses
|
||||
WHERE description IS NULL OR amount IS NULL;
|
||||
|
||||
-- Find duplicates
|
||||
SELECT user_id, email, COUNT(*) as count
|
||||
FROM users
|
||||
GROUP BY user_id, email
|
||||
HAVING COUNT(*) > 1;
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - For parameterized, production-ready queries
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB SQL Reference](https://www.cockroachlabs.com/docs/stable/sql-statements.html) - Official SQL documentation
|
||||
305
docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md
Normal file
@@ -0,0 +1,305 @@
|
||||
---
|
||||
title: "cockroachdb-list-schemas"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
List schemas in a CockroachDB database.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-list-schemas` tool retrieves a list of schemas (namespaces) in a CockroachDB database. Schemas are used to organize database objects such as tables, views, and functions into logical groups.
|
||||
|
||||
This tool is useful for:
|
||||
- Understanding database organization
|
||||
- Discovering available schemas
|
||||
- Multi-tenant application analysis
|
||||
- Schema-level access control planning
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
list_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: List all schemas in the database
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-list-schemas` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Output Structure
|
||||
|
||||
The tool returns a list of schemas with the following information:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"catalog_name": "defaultdb",
|
||||
"schema_name": "public",
|
||||
"is_user_defined": true
|
||||
},
|
||||
{
|
||||
"catalog_name": "defaultdb",
|
||||
"schema_name": "analytics",
|
||||
"is_user_defined": true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `catalog_name` | string | The database (catalog) name |
|
||||
| `schema_name` | string | The schema name |
|
||||
| `is_user_defined` | boolean | Whether this is a user-created schema (excludes system schemas) |
|
||||
|
||||
## Usage Example
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
No parameters are required. The tool automatically lists all user-defined schemas.
|
||||
|
||||
## Default Schemas
|
||||
|
||||
CockroachDB includes several standard schemas:
|
||||
|
||||
- **`public`**: The default schema for user objects
|
||||
- **`pg_catalog`**: PostgreSQL system catalog (excluded from results)
|
||||
- **`information_schema`**: SQL standard metadata views (excluded from results)
|
||||
- **`crdb_internal`**: CockroachDB internal metadata (excluded from results)
|
||||
- **`pg_extension`**: PostgreSQL extension objects (excluded from results)
|
||||
|
||||
The tool filters out system schemas and only returns user-defined schemas.
|
||||
|
||||
## Schema Management in CockroachDB
|
||||
|
||||
### Creating Schemas
|
||||
|
||||
```sql
|
||||
CREATE SCHEMA analytics;
|
||||
```
|
||||
|
||||
### Using Schemas
|
||||
|
||||
```sql
|
||||
-- Create table in specific schema
|
||||
CREATE TABLE analytics.revenue (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
amount DECIMAL(10,2),
|
||||
date DATE
|
||||
);
|
||||
|
||||
-- Query from specific schema
|
||||
SELECT * FROM analytics.revenue;
|
||||
```
|
||||
|
||||
### Schema Search Path
|
||||
|
||||
The search path determines which schemas are searched for unqualified object names:
|
||||
|
||||
```sql
|
||||
-- Show current search path
|
||||
SHOW search_path;
|
||||
|
||||
-- Set search path
|
||||
SET search_path = analytics, public;
|
||||
```
|
||||
|
||||
## Multi-Tenant Applications
|
||||
|
||||
Schemas are commonly used for multi-tenant applications:
|
||||
|
||||
```sql
|
||||
-- Create schema per tenant
|
||||
CREATE SCHEMA tenant_acme;
|
||||
CREATE SCHEMA tenant_globex;
|
||||
|
||||
-- Create same table structure in each schema
|
||||
CREATE TABLE tenant_acme.orders (...);
|
||||
CREATE TABLE tenant_globex.orders (...);
|
||||
```
|
||||
|
||||
The `cockroachdb-list-schemas` tool helps discover all tenant schemas:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tenants:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List all tenant schemas in the database.
|
||||
Each schema represents a separate tenant's data namespace.
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use Schemas for Organization
|
||||
|
||||
Group related tables into schemas:
|
||||
|
||||
```sql
|
||||
CREATE SCHEMA sales;
|
||||
CREATE SCHEMA inventory;
|
||||
CREATE SCHEMA hr;
|
||||
|
||||
CREATE TABLE sales.orders (...);
|
||||
CREATE TABLE inventory.products (...);
|
||||
CREATE TABLE hr.employees (...);
|
||||
```
|
||||
|
||||
### Schema Naming Conventions
|
||||
|
||||
Use clear, descriptive schema names:
|
||||
- Lowercase names
|
||||
- Use underscores for multi-word names
|
||||
- Avoid reserved keywords
|
||||
- Use prefixes for grouped schemas (e.g., `tenant_`, `app_`)
|
||||
|
||||
### Schema-Level Permissions
|
||||
|
||||
Schemas enable fine-grained access control:
|
||||
|
||||
```sql
|
||||
-- Grant access to specific schema
|
||||
GRANT USAGE ON SCHEMA analytics TO analyst_role;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA analytics TO analyst_role;
|
||||
|
||||
-- Revoke access
|
||||
REVOKE ALL ON SCHEMA hr FROM public;
|
||||
```
|
||||
|
||||
## Integration with Other Tools
|
||||
|
||||
### Combined with List Tables
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: List all schemas first
|
||||
|
||||
list_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List tables in the database.
|
||||
Use list_schemas first to understand schema organization.
|
||||
```
|
||||
|
||||
### Schema Discovery Workflow
|
||||
|
||||
1. Call `cockroachdb-list-schemas` to discover schemas
|
||||
2. Call `cockroachdb-list-tables` to see tables in each schema
|
||||
3. Generate queries using fully qualified names: `schema.table`
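
As a minimal sketch, the workflow above could be driven from the Toolbox Python SDK (`toolbox_core`), assuming the `list_schemas` and `list_tables` tools from the example configuration are served by a local Toolbox instance:

```python
import asyncio

from toolbox_core import ToolboxClient  # assumed Toolbox Python SDK import

async def discover():
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        list_schemas = await client.load_tool("list_schemas")
        list_tables = await client.load_tool("list_tables")

        schemas = await list_schemas()  # step 1: discover schemas
        tables = await list_tables()    # step 2: discover tables
        # Step 3: combine both results into fully qualified names
        # (for example analytics.revenue) when generating queries.
        print(schemas)
        print(tables)

asyncio.run(discover())
```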
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Discover Database Structure
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
discover_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Discover how the database is organized into schemas.
|
||||
Use this to understand the logical grouping of tables.
|
||||
```
|
||||
|
||||
### Multi-Tenant Analysis
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tenant_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List all tenant schemas (each tenant has their own schema).
|
||||
Schema names follow the pattern: tenant_<company_name>
|
||||
```
|
||||
|
||||
### Schema Migration Planning
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
audit_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Audit existing schemas before migration.
|
||||
Identifies all schemas that need to be migrated.
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool handles common errors:
|
||||
- **Connection errors**: Returns connection failure details
|
||||
- **Permission errors**: Returns error if user lacks USAGE privilege
|
||||
- **Empty results**: Returns empty array if no user schemas exist
|
||||
|
||||
## Permissions Required
|
||||
|
||||
To list schemas, the user needs:
|
||||
- `CONNECT` privilege on the database
|
||||
- No specific schema privileges required for listing
|
||||
|
||||
To query objects within schemas, the user needs:
|
||||
- `USAGE` privilege on the schema
|
||||
- Appropriate object privileges (SELECT, INSERT, etc.)
|
||||
|
||||
## CockroachDB-Specific Features
|
||||
|
||||
### System Schemas
|
||||
|
||||
CockroachDB includes PostgreSQL-compatible system schemas plus CockroachDB-specific ones:
|
||||
|
||||
- `crdb_internal.*`: CockroachDB internal metadata and statistics
|
||||
- `pg_catalog.*`: PostgreSQL system catalog
|
||||
- `information_schema.*`: SQL standard information schema
|
||||
|
||||
These are automatically filtered from the results.
|
||||
|
||||
### User-Defined Flag
|
||||
|
||||
The `is_user_defined` field helps distinguish:
|
||||
- `true`: User-created schemas
|
||||
- `false`: System schemas (already filtered out)
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Official documentation
|
||||
344
docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md
Normal file
@@ -0,0 +1,344 @@
|
||||
---
|
||||
title: "cockroachdb-list-tables"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
List tables in a CockroachDB database with schema details.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-list-tables` tool retrieves a list of tables from a CockroachDB database. It provides detailed information about table structure, including columns, constraints, indexes, and foreign key relationships.
|
||||
|
||||
This tool is useful for:
|
||||
- Database schema discovery
|
||||
- Understanding table relationships
|
||||
- Generating context for AI-powered database queries
|
||||
- Documentation and analysis
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
list_all_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: List all user tables in the database with their structure
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-list-tables` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts optional runtime parameters:
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `table_names` | array | all tables | List of specific table names to retrieve |
|
||||
| `output_format` | string | "detailed" | Output format: "simple" or "detailed" |
|
||||
|
||||
## Output Formats
|
||||
|
||||
### Simple Format
|
||||
|
||||
Returns basic table information:
|
||||
- Table name
|
||||
- Row count estimate
|
||||
- Size information
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users"],
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
### Detailed Format (Default)
|
||||
|
||||
Returns comprehensive table information:
|
||||
- Table name and schema
|
||||
- All columns with types and constraints
|
||||
- Primary keys
|
||||
- Foreign keys and relationships
|
||||
- Indexes
|
||||
- Check constraints
|
||||
- Table size and row counts
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### List All Tables
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
### List Specific Tables
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders", "expenses"]
|
||||
}
|
||||
```
|
||||
|
||||
### Simple Output
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
## Output Structure
|
||||
|
||||
### Simple Format Output
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"estimated_rows": 1000,
|
||||
"size": "128 KB"
|
||||
}
|
||||
```
|
||||
|
||||
### Detailed Format Output
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"schema": "public",
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"type": "UUID",
|
||||
"nullable": false,
|
||||
"default": "gen_random_uuid()"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"type": "STRING",
|
||||
"nullable": false,
|
||||
"default": null
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"type": "TIMESTAMP",
|
||||
"nullable": false,
|
||||
"default": "now()"
|
||||
}
|
||||
],
|
||||
"primary_key": ["id"],
|
||||
"indexes": [
|
||||
{
|
||||
"name": "users_pkey",
|
||||
"columns": ["id"],
|
||||
"unique": true,
|
||||
"primary": true
|
||||
},
|
||||
{
|
||||
"name": "users_email_idx",
|
||||
"columns": ["email"],
|
||||
"unique": true,
|
||||
"primary": false
|
||||
}
|
||||
],
|
||||
"foreign_keys": [],
|
||||
"constraints": [
|
||||
{
|
||||
"name": "users_email_check",
|
||||
"type": "CHECK",
|
||||
"definition": "email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Z|a-z]{2,}$'"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## CockroachDB-Specific Information
|
||||
|
||||
### UUID Primary Keys
|
||||
|
||||
The tool recognizes CockroachDB's recommended UUID primary key pattern:
|
||||
|
||||
```sql
|
||||
CREATE TABLE users (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
...
|
||||
);
|
||||
```
|
||||
|
||||
### Multi-Region Tables
|
||||
|
||||
For multi-region tables, the output includes locality information:
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"locality": "REGIONAL BY ROW",
|
||||
"regions": ["us-east-1", "us-west-2", "eu-west-1"]
|
||||
}
|
||||
```
|
||||
|
||||
### Interleaved Tables
|
||||
|
||||
The tool shows parent-child relationships for interleaved tables (legacy feature):
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "order_items",
|
||||
"interleaved_in": "orders"
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use for Schema Discovery
|
||||
|
||||
The tool is ideal for helping AI assistants understand your database structure:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
discover_schema:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Use this tool first to understand the database schema before generating queries.
|
||||
It shows all tables, their columns, data types, and relationships.
|
||||
```
|
||||
|
||||
### Filter Large Schemas
|
||||
|
||||
For databases with many tables, specify relevant tables:
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders", "products"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
### Use Simple Format for Overviews
|
||||
|
||||
When you need just table names and sizes:
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
## Excluded Tables
|
||||
|
||||
The tool automatically excludes system tables and schemas:
|
||||
- `pg_catalog.*` - PostgreSQL system catalog
|
||||
- `information_schema.*` - SQL standard information schema
|
||||
- `crdb_internal.*` - CockroachDB internal tables
|
||||
- `pg_extension.*` - PostgreSQL extension tables
|
||||
|
||||
Only user-created tables in the public schema (and other user schemas) are returned.
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool handles common errors:
|
||||
- **Table not found**: Returns empty result for non-existent tables
|
||||
- **Permission errors**: Returns error if user lacks SELECT privileges
|
||||
- **Connection errors**: Returns connection failure details
|
||||
|
||||
## Integration with AI Assistants
|
||||
|
||||
### Prompt Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Lists all tables in the database with detailed schema information.
|
||||
Use this tool to understand:
|
||||
- What tables exist
|
||||
- What columns each table has
|
||||
- Data types and constraints
|
||||
- Relationships between tables (foreign keys)
|
||||
- Available indexes
|
||||
|
||||
Always call this tool before generating SQL queries to ensure
|
||||
you use correct table and column names.
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Generate Context for Queries
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
This provides comprehensive schema information that helps AI assistants generate accurate SQL queries.
|
||||
|
||||
### Analyze Table Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
Perfect for understanding a specific table's structure, constraints, and relationships.
|
||||
|
||||
### Quick Schema Overview
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
Gets a quick list of tables with basic statistics.
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
- **Simple format** is faster for large databases
|
||||
- **Detailed format** queries system tables extensively
|
||||
- Specifying `table_names` reduces query time
|
||||
- Results are fetched in a single query for efficiency
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Best practices
|
||||
291
docs/en/resources/tools/cockroachdb/cockroachdb-sql.md
Normal file
@@ -0,0 +1,291 @@
|
||||
---
|
||||
title: "cockroachdb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute parameterized SQL queries in CockroachDB.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-sql` tool allows you to execute parameterized SQL queries against a CockroachDB database. This tool supports prepared statements with parameter binding, template parameters for dynamic query construction, and automatic transaction retry for resilience against serialization conflicts.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
get_user_orders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get all orders for a specific user
|
||||
statement: |
|
||||
SELECT o.id, o.order_date, o.total_amount, o.status
|
||||
FROM orders o
|
||||
WHERE o.user_id = $1
|
||||
ORDER BY o.order_date DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The UUID of the user
|
||||
```
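
As a minimal sketch of the client side, the declared parameters become keyword arguments when the tool is invoked, and each value is bound to its `$1`, `$2`, ... placeholder as a prepared-statement parameter. This assumes the `get_user_orders` tool above is served by a local Toolbox instance and uses the Toolbox Python SDK (`toolbox_core`); the UUID is a placeholder.

```python
import asyncio

from toolbox_core import ToolboxClient  # assumed Toolbox Python SDK import

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        get_user_orders = await client.load_tool("get_user_orders")
        # "user_id" maps to the $1 placeholder in the configured statement.
        orders = await get_user_orders(
            user_id="2d3e4f50-0000-0000-0000-000000000000"  # placeholder UUID
        )
        print(orders)

asyncio.run(main())
```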
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-sql` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description of what the tool does |
|
||||
| `statement` | string | The SQL query to execute |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `parameters` | array | List of parameter definitions for the query |
|
||||
| `templateParameters` | array | List of template parameters for dynamic query construction |
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
Parameters allow you to safely pass values into your SQL queries using prepared statements. CockroachDB uses PostgreSQL-style parameter placeholders: `$1`, `$2`, etc.
|
||||
|
||||
### Parameter Types
|
||||
|
||||
- `string`: Text values
|
||||
- `number`: Numeric values (integers or decimals)
|
||||
- `boolean`: True/false values
|
||||
- `array`: Array of values
|
||||
|
||||
### Example with Multiple Parameters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
filter_expenses:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Filter expenses by category and date range
|
||||
statement: |
|
||||
SELECT id, description, amount, category, expense_date
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
AND category = $2
|
||||
AND expense_date >= $3
|
||||
AND expense_date <= $4
|
||||
ORDER BY expense_date DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
- name: category
|
||||
type: string
|
||||
description: Expense category (e.g., "Food", "Transport")
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
- name: end_date
|
||||
type: string
|
||||
description: End date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
## Template Parameters
|
||||
|
||||
Template parameters enable dynamic query construction by replacing placeholders in the SQL statement before parameter binding. This is useful for dynamic table names, column names, or query structure.
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_column_data:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get data from a specific column
|
||||
statement: |
|
||||
SELECT {{column_name}}
|
||||
FROM {{table_name}}
|
||||
WHERE user_id = $1
|
||||
LIMIT 100
|
||||
templateParameters:
|
||||
- name: table_name
|
||||
type: string
|
||||
description: The table to query
|
||||
- name: column_name
|
||||
type: string
|
||||
description: The column to retrieve
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use UUID Primary Keys
|
||||
|
||||
CockroachDB performs best with UUID primary keys to avoid transaction hotspots:
|
||||
|
||||
```sql
|
||||
CREATE TABLE orders (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
user_id UUID NOT NULL,
|
||||
order_date TIMESTAMP DEFAULT now(),
|
||||
total_amount DECIMAL(10,2)
|
||||
);
|
||||
```
|
||||
|
||||
### Use Indexes for Performance
|
||||
|
||||
Create indexes on frequently queried columns:
|
||||
|
||||
```sql
|
||||
CREATE INDEX idx_orders_user_id ON orders(user_id);
|
||||
CREATE INDEX idx_orders_date ON orders(order_date DESC);
|
||||
```
|
||||
|
||||
### Use JOINs Efficiently
|
||||
|
||||
CockroachDB supports standard SQL JOINs. Keep joins efficient by:
|
||||
- Adding appropriate indexes
|
||||
- Using UUIDs for foreign keys
|
||||
- Limiting result sets with WHERE clauses
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_user_with_orders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get user details with their recent orders
|
||||
statement: |
|
||||
SELECT u.name, u.email, o.id as order_id, o.order_date, o.total_amount
|
||||
FROM users u
|
||||
LEFT JOIN orders o ON u.id = o.user_id
|
||||
WHERE u.id = $1
|
||||
ORDER BY o.order_date DESC
|
||||
LIMIT 10
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
### Handle NULL Values
|
||||
|
||||
Use COALESCE or NULL checks when dealing with nullable columns:
|
||||
|
||||
```sql
|
||||
SELECT id, description, COALESCE(notes, 'No notes') as notes
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool automatically handles:
|
||||
- **Connection errors**: Retried with exponential backoff
|
||||
- **Serialization conflicts**: Automatically retried using cockroach-go library
|
||||
- **Invalid parameters**: Returns descriptive error messages
|
||||
- **SQL syntax errors**: Returns database error details
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Aggregations
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
expense_summary:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get expense summary by category for a user
|
||||
statement: |
|
||||
SELECT
|
||||
category,
|
||||
COUNT(*) as count,
|
||||
SUM(amount) as total_amount,
|
||||
AVG(amount) as avg_amount
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
AND expense_date >= $2
|
||||
GROUP BY category
|
||||
ORDER BY total_amount DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
### Window Functions
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
running_total:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get running total of expenses
|
||||
statement: |
|
||||
SELECT
|
||||
expense_date,
|
||||
amount,
|
||||
SUM(amount) OVER (ORDER BY expense_date) as running_total
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
ORDER BY expense_date
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
### Common Table Expressions (CTEs)
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
top_spenders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Find top spending users
|
||||
statement: |
|
||||
WITH user_totals AS (
|
||||
SELECT
|
||||
user_id,
|
||||
SUM(amount) as total_spent
|
||||
FROM expenses
|
||||
WHERE expense_date >= $1
|
||||
GROUP BY user_id
|
||||
)
|
||||
SELECT
|
||||
u.name,
|
||||
u.email,
|
||||
ut.total_spent
|
||||
FROM user_totals ut
|
||||
JOIN users u ON ut.user_id = u.id
|
||||
ORDER BY ut.total_spent DESC
|
||||
LIMIT 10
|
||||
parameters:
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - For ad-hoc SQL execution
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
47
docs/en/resources/tools/looker/looker-validate-project.md
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
title: "looker-validate-project"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors
|
||||
aliases:
|
||||
- /resources/tools/looker-validate-project
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-validate-project` accepts a `project_id` parameter.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
validate_project:
|
||||
kind: looker-validate-project
|
||||
source: looker-source
|
||||
description: |
|
||||
This tool checks a LookML project for syntax errors.
|
||||
|
||||
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
|
||||
|
||||
Parameters:
|
||||
- project_id (required): The unique ID of the LookML project.
|
||||
|
||||
Output:
|
||||
A list of error details including the file path and line number, and also a list of models
|
||||
that are not currently valid due to LookML errors.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|----------------------------------------------------|
|
||||
| kind | string | true | Must be "looker-validate-project". |
|
||||
| source | string | true | Name of the source Looker instance. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -771,7 +771,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.26.0\" # x-release-please-version\n",
|
||||
"version = \"0.27.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.26.0"
|
||||
export VERSION="0.27.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.26.0\" # x-release-please-version\n",
|
||||
"version = \"0.27.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

docs/en/samples/pre_post_processing/_index.md
@@ -0,0 +1,54 @@
---
title: "Pre- and Post- Processing"
type: docs
weight: 1
description: >
  Intercept and modify interactions between the agent and its tools either before or after a tool is executed.
---

Pre- and post- processing allow developers to intercept and modify interactions between the agent and its tools or the user.

{{< notice note >}}

These capabilities are typically features of **orchestration frameworks** (like LangChain, LangGraph, or Agent Builder) rather than the Toolbox SDK itself. However, Toolbox tools are designed to fully leverage these framework capabilities to support robust, secure, and compliant agent architectures.

{{< /notice >}}

## Types of Processing

### Pre-processing

Pre-processing occurs before a tool is executed or an agent processes a message. Key types include:

- **Input Sanitization & Redaction**: Detecting and masking sensitive information (like PII) in user queries or tool arguments to prevent it from being logged or sent to unauthorized systems.
- **Business Logic Validation**: Verifying that the proposed action complies with business rules (e.g., ensuring a requested hotel stay does not exceed 14 days, or checking if a user has sufficient permission); see the sketch after this list.
- **Security Guardrails**: Analyzing inputs for potential prompt injection attacks or malicious payloads.
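
For illustration, a business-rule check of this kind can be a small framework-agnostic function that runs before the tool is invoked. This is a minimal sketch: the function names, argument names, and the `invoke_tool` callable are hypothetical stand-ins, not part of the Toolbox SDK or any specific framework.

```py
from datetime import date
from typing import Callable, Optional

MAX_STAY_DAYS = 14  # assumed business rule, mirroring the 14-day example above


def validate_stay_duration(args: dict) -> Optional[str]:
    """Pre-processing: return an error message to block the call, or None to proceed."""
    checkin = date.fromisoformat(args["checkin_date"])
    checkout = date.fromisoformat(args["checkout_date"])
    if (checkout - checkin).days > MAX_STAY_DAYS:
        return f"Error: Maximum stay duration is {MAX_STAY_DAYS} days."
    return None


def guarded_call(invoke_tool: Callable[[dict], str], args: dict) -> str:
    error = validate_stay_duration(args)
    if error:
        return error          # short-circuit: the tool never runs
    return invoke_tool(args)  # normal execution path
```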

### Post-processing

Post-processing occurs after a tool has executed or the model has generated a response. Key types include:

- **Response Enrichment**: Injecting additional data into the tool output that wasn't part of the raw API response (e.g., calculating loyalty points earned based on the booking value); see the sketch after this list.
- **Output Formatting**: Transforming raw data (like JSON or XML) into a more human-readable or model-friendly format to improve the agent's understanding.
- **Compliance Auditing**: Logging the final outcome of transactions, including the original request and the result, to a secure audit trail.
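
As a rough sketch of response enrichment, the raw output of a booking tool can be decorated before it reaches the model. The tool name `book-hotel` and the 500-point bonus mirror the Python sample later in this guide; the function itself is hypothetical.

```py
LOYALTY_BONUS = 500  # assumed enrichment value, matching the sample agent


def enrich_booking_result(tool_name: str, raw_output: str) -> str:
    """Post-processing: append loyalty information to successful bookings only."""
    if tool_name == "book-hotel" and "Error" not in raw_output:
        return (
            f"Booking Confirmed! You earned {LOYALTY_BONUS} Loyalty Points with this stay.\n\n"
            f"System Details: {raw_output}"
        )
    return raw_output
```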

## Processing Scopes

While processing logic can be applied at various levels (Agent, Model, Tool), this guide primarily focuses on **Tool Level** processing, which is most relevant for granular control over tool execution.

### Tool Level (Primary Focus)

Wraps individual tool executions. This is best for logic specific to a single tool or a set of tools.

- **Scope**: Intercepts the raw inputs (arguments) to a tool and its outputs.
- **Use Cases**: Argument validation, output formatting, specific privacy rules for sensitive tools; see the sketch after this list.
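
A tool-level wrapper typically composes both steps around the actual tool call: check the arguments, execute, then transform the result. The sketch below is hypothetical glue code (the callables stand in for functions like the two sketches above), not a specific framework API.

```py
from typing import Callable, Optional


def wrapped_tool_call(
    invoke_tool: Callable[[dict], str],
    args: dict,
    pre_check: Callable[[dict], Optional[str]],
    post_process: Callable[[str], str],
) -> str:
    error = pre_check(args)   # pre-processing: may block the call entirely
    if error:
        return error
    raw = invoke_tool(args)   # the tool itself runs here
    return post_process(raw)  # post-processing: enrich or reformat the output
```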

### Other Levels

It is helpful to understand how tool-level processing differs from other scopes:

- **Model Level**: Intercepts individual calls to the LLM (prompts and responses). Unlike tool-level, this applies globally to all text sent/received, making it better for global PII redaction or token tracking.
- **Agent Level**: Wraps the high-level execution loop (e.g., a "turn" in the conversation). Unlike tool-level, this envelops the entire turn (user input to final response), making it suitable for session management or end-to-end auditing.

## Samples

docs/en/samples/pre_post_processing/python.md
@@ -0,0 +1,40 @@
---
title: "Python"
type: docs
weight: 1
description: >
  How to add pre- and post- processing to your Agents using Python.
---

## Prerequisites

This tutorial assumes that you have set up Toolbox with a basic agent as described in the [local quickstart](../../getting-started/local_quickstart.md).

This guide demonstrates how to implement these patterns in your Toolbox applications.

## Implementation

{{< tabpane persist=header >}}
{{% tab header="ADK" text=true %}}
Coming soon.
{{% /tab %}}
{{% tab header="Langchain" text=true %}}
The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre- and post- processing for tool calls.

```py
{{< include "python/langchain/agent.py" >}}
```

You can also add model-level (`wrap_model`) and agent-level (`before_agent`, `after_agent`) hooks to intercept messages at different stages of the execution loop. See the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks) for details on these additional hook types.
{{% /tab %}}
{{< /tabpane >}}

## Results

The output should look similar to the following. Note that exact responses may vary due to the non-deterministic nature of LLMs and differences between orchestration frameworks.

```
AI: Booking Confirmed! You earned 500 Loyalty Points with this stay.

AI: Error: Maximum stay duration is 14 days.
```

docs/en/samples/pre_post_processing/python/__init__.py
@@ -0,0 +1,19 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# This file makes the 'pre_post_processing/python' directory a Python package.

# You can include any package-level initialization logic here if needed.
# For now, this file is empty.

docs/en/samples/pre_post_processing/python/agent_test.py
@@ -0,0 +1,51 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import importlib
import os
from pathlib import Path

import pytest

ORCH_NAME = os.environ.get("ORCH_NAME")
module_path = f"python.{ORCH_NAME}.agent"
agent = importlib.import_module(module_path)

GOLDEN_KEYWORDS = [
    "AI:",
    "Loyalty Points",
    "POLICY CHECK: Intercepting 'update-hotel'",
]


# --- Execution Tests ---
class TestExecution:
    """Test framework execution and output validation."""

    @pytest.fixture(scope="function")
    def script_output(self, capsys):
        """Run the agent function and return its output."""
        asyncio.run(agent.main())
        return capsys.readouterr()

    def test_script_runs_without_errors(self, script_output):
        """Test that the script runs and produces no stderr."""
        assert script_output.err == "", f"Script produced stderr: {script_output.err}"

    def test_keywords_in_output(self, script_output):
        """Test that expected keywords are present in the script's output."""
        output = script_output.out
        print(f"\nAgent Output:\n{output}\n")
        missing_keywords = [kw for kw in GOLDEN_KEYWORDS if kw not in output]
        assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"

docs/en/samples/pre_post_processing/python/langchain/agent.py
@@ -0,0 +1,116 @@
import asyncio
from datetime import datetime

from langchain.agents import create_agent
from langchain.agents.middleware import wrap_tool_call
from langchain_core.messages import ToolMessage
from langchain_google_vertexai import ChatVertexAI
from toolbox_langchain import ToolboxClient

system_prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""


# Pre-processing
@wrap_tool_call
async def enforce_business_rules(request, handler):
    """
    Business Logic Validation:
    Enforces max stay duration (e.g., max 14 days).
    """
    tool_call = request.tool_call
    name = tool_call["name"]
    args = tool_call["args"]

    print(f"POLICY CHECK: Intercepting '{name}'")

    if name == "update-hotel":
        if "checkin_date" in args and "checkout_date" in args:
            try:
                start = datetime.fromisoformat(args["checkin_date"])
                end = datetime.fromisoformat(args["checkout_date"])
                duration = (end - start).days

                if duration > 14:
                    print("BLOCKED: Stay too long")
                    return ToolMessage(
                        content="Error: Maximum stay duration is 14 days.",
                        tool_call_id=tool_call["id"],
                    )
            except ValueError:
                pass  # Ignore invalid date formats

    # PRE: Code here runs BEFORE the tool execution

    # EXEC: Execute the tool (or next middleware)
    result = await handler(request)

    # POST: Code here runs AFTER the tool execution
    return result


# Post-processing
@wrap_tool_call
async def enrich_response(request, handler):
    """
    Post-Processing & Enrichment:
    Adds loyalty points information to successful bookings.
    Standardizes output format.
    """
    # PRE: Code here runs BEFORE the tool execution

    # EXEC: Execute the tool (or next middleware)
    result = await handler(request)

    # POST: Code here runs AFTER the tool execution
    if isinstance(result, ToolMessage):
        content = str(result.content)
        tool_name = request.tool_call["name"]

        if tool_name == "book-hotel" and "Error" not in content:
            loyalty_bonus = 500
            result.content = f"Booking Confirmed!\n You earned {loyalty_bonus} Loyalty Points with this stay.\n\nSystem Details: {content}"

    return result


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        tools = await client.aload_toolset("my-toolset")
        model = ChatVertexAI(model="gemini-2.5-flash")
        agent = create_agent(
            system_prompt=system_prompt,
            model=model,
            tools=tools,
            # add any pre- and post-processing methods
            middleware=[enforce_business_rules, enrich_response],
        )

        user_input = "Book hotel with id 3."
        response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": user_input}]}
        )

        print("-" * 50)
        last_ai_msg = response["messages"][-1].content
        print(f"AI: {last_ai_msg}")

        # Test Pre-processing (a 33-day stay exceeds the 14-day limit, so the call is blocked)
        print("-" * 50)
        user_input = "Update my hotel with id 3 with checkin date 2025-01-18 and checkout date 2025-02-20"
        response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": user_input}]}
        )
        last_ai_msg = response["messages"][-1].content
        print(f"AI: {last_ai_msg}")


if __name__ == "__main__":
    asyncio.run(main())

@@ -0,0 +1,3 @@
langchain==1.2.6
langchain-google-vertexai==3.2.2
toolbox-langchain==0.5.8