mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-02-13 08:35:15 -05:00
Compare commits
44 Commits
adk-docs
...
pr/dumians
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
251ef22839 | ||
|
|
8dc4bd7dd6 | ||
|
|
a00d0edcd5 | ||
|
|
d1eb1799a0 | ||
|
|
2b81d6099a | ||
|
|
53865b6e21 | ||
|
|
6843b46328 | ||
|
|
05152f732d | ||
|
|
a48101b3c5 | ||
|
|
418d6d791e | ||
|
|
452d686750 | ||
|
|
8fb74263a7 | ||
|
|
09f3bc7959 | ||
|
|
7970b8787e | ||
|
|
0b7a86ae58 | ||
|
|
0797142103 | ||
|
|
f26750e834 | ||
|
|
c46d7d6fa0 | ||
|
|
5e2034d146 | ||
|
|
e2272ccdbc | ||
|
|
97f68129f5 | ||
|
|
fea96fed03 | ||
|
|
195767bdcd | ||
|
|
c5524d32f5 | ||
|
|
e1739abd81 | ||
|
|
478a0bdb59 | ||
|
|
2d341acaa6 | ||
|
|
f032389a07 | ||
|
|
32610d71a3 | ||
|
|
32cb4db712 | ||
|
|
1fdd99a9b6 | ||
|
|
1f8019c50a | ||
|
|
6e8255476a | ||
|
|
4fb5b34a5a | ||
|
|
1664a69dfd | ||
|
|
7f88caa985 | ||
|
|
3c9365dfd9 | ||
|
|
7767e1e019 | ||
|
|
bb40cdf78c | ||
|
|
a15a12873f | ||
|
|
5e4f2a131f | ||
|
|
80ef346214 | ||
|
|
732eaed41d | ||
|
|
a2097ba8eb |
@@ -354,6 +354,30 @@ steps:
|
||||
postgressql \
|
||||
postgresexecutesql
|
||||
|
||||
- id: "cockroachdb"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "COCKROACHDB_DATABASE=$_DATABASE_NAME"
|
||||
- "COCKROACHDB_PORT=$_COCKROACHDB_PORT"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["COCKROACHDB_USER", "COCKROACHDB_HOST","CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"CockroachDB" \
|
||||
cockroachdb \
|
||||
cockroachdbsql \
|
||||
cockroachdbexecutesql \
|
||||
cockroachdblisttables \
|
||||
cockroachdblistschemas
|
||||
|
||||
- id: "spanner"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
@@ -919,7 +943,7 @@ steps:
|
||||
# Install the C compiler and Oracle SDK headers needed for cgo
|
||||
dnf install -y gcc oracle-instantclient-devel
|
||||
# Install Go
|
||||
curl -L -o go.tar.gz "https://go.dev/dl/go1.25.1.linux-amd64.tar.gz"
|
||||
curl -L -o go.tar.gz "https://go.dev/dl/go1.25.5.linux-amd64.tar.gz"
|
||||
tar -C /usr/local -xzf go.tar.gz
|
||||
export PATH="/usr/local/go/bin:$$PATH"
|
||||
|
||||
@@ -1129,6 +1153,11 @@ availableSecrets:
|
||||
env: MARIADB_HOST
|
||||
- versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
|
||||
env: MONGODB_URI
|
||||
- versionName: projects/$PROJECT_ID/secrets/cockroachdb_user/versions/latest
|
||||
env: COCKROACHDB_USER
|
||||
- versionName: projects/$PROJECT_ID/secrets/cockroachdb_host/versions/latest
|
||||
env: COCKROACHDB_HOST
|
||||
|
||||
|
||||
options:
|
||||
logging: CLOUD_LOGGING_ONLY
|
||||
@@ -1189,6 +1218,9 @@ substitutions:
|
||||
_SINGLESTORE_PORT: "3308"
|
||||
_SINGLESTORE_DATABASE: "singlestore"
|
||||
_SINGLESTORE_USER: "root"
|
||||
_COCKROACHDB_HOST: 127.0.0.1
|
||||
_COCKROACHDB_PORT: "26257"
|
||||
_COCKROACHDB_USER: "root"
|
||||
_MARIADB_PORT: "3307"
|
||||
_MARIADB_DATABASE: test_database
|
||||
_SNOWFLAKE_DATABASE: "test"
|
||||
|
||||
@@ -23,8 +23,8 @@ steps:
|
||||
- |
|
||||
set -ex
|
||||
export VERSION=$(cat ./cmd/version.txt)
|
||||
chmod +x .ci/sample_tests/run_py_tests.sh
|
||||
.ci/sample_tests/run_py_tests.sh
|
||||
chmod +x .ci/sample_tests/run_tests.sh
|
||||
.ci/sample_tests/run_tests.sh
|
||||
env:
|
||||
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
|
||||
- 'GCP_PROJECT=${_GCP_PROJECT}'
|
||||
|
||||
1
.github/release-please.yml
vendored
1
.github/release-please.yml
vendored
@@ -37,6 +37,7 @@ extraFiles: [
|
||||
"docs/en/how-to/connect-ide/postgres_mcp.md",
|
||||
"docs/en/how-to/connect-ide/neo4j_mcp.md",
|
||||
"docs/en/how-to/connect-ide/sqlite_mcp.md",
|
||||
"docs/en/how-to/connect-ide/oracle_mcp.md",
|
||||
"gemini-extension.json",
|
||||
{
|
||||
"type": "json",
|
||||
|
||||
4
.github/workflows/deploy_dev_docs.yaml
vendored
4
.github/workflows/deploy_dev_docs.yaml
vendored
@@ -40,7 +40,7 @@ jobs:
|
||||
group: docs-deployment
|
||||
cancel-in-progress: false
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
|
||||
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
node-version: "22"
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
|
||||
@@ -30,14 +30,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout main branch (for latest templates and theme)
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: 'main'
|
||||
submodules: 'recursive'
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout old content from tag into a temporary directory
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ github.event.inputs.version_tag }}
|
||||
path: 'old_version_source' # Checkout into a temp subdir
|
||||
|
||||
2
.github/workflows/deploy_versioned_docs.yaml
vendored
2
.github/workflows/deploy_versioned_docs.yaml
vendored
@@ -30,7 +30,7 @@ jobs:
|
||||
cancel-in-progress: false
|
||||
steps:
|
||||
- name: Checkout Code at Tag
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: ${{ github.event.release.tag_name }}
|
||||
|
||||
|
||||
2
.github/workflows/docs_preview_clean.yaml
vendored
2
.github/workflows/docs_preview_clean.yaml
vendored
@@ -34,7 +34,7 @@ jobs:
|
||||
group: "preview-${{ github.event.number }}"
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: versioned-gh-pages
|
||||
|
||||
|
||||
4
.github/workflows/docs_preview_deploy.yaml
vendored
4
.github/workflows/docs_preview_deploy.yaml
vendored
@@ -49,7 +49,7 @@ jobs:
|
||||
group: "preview-${{ github.event.number }}"
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
# Checkout the PR's HEAD commit (supports forks).
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
node-version: "22"
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
|
||||
30
.github/workflows/link_checker_workflow.yaml
vendored
30
.github/workflows/link_checker_workflow.yaml
vendored
@@ -22,39 +22,47 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Restore lychee cache
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
||||
with:
|
||||
path: .lycheecache
|
||||
key: cache-lychee-${{ github.sha }}
|
||||
restore-keys: cache-lychee-
|
||||
|
||||
- name: Link Checker
|
||||
id: lychee-check
|
||||
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
|
||||
continue-on-error: true
|
||||
with:
|
||||
args: >
|
||||
--verbose
|
||||
--quiet
|
||||
--no-progress
|
||||
--cache
|
||||
--max-cache-age 1d
|
||||
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
|
||||
README.md
|
||||
docs/
|
||||
output: /tmp/foo.txt
|
||||
fail: true
|
||||
jobSummary: true
|
||||
debug: true
|
||||
output: lychee-report.md
|
||||
format: markdown
|
||||
fail: true
|
||||
jobSummary: false
|
||||
debug: false
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# This step only runs if the 'lychee_check' step fails, ensuring the
|
||||
# context note only appears when the developer needs to troubleshoot.
|
||||
- name: Display Link Context Note on Failure
|
||||
if: ${{ failure() }}
|
||||
|
||||
- name: Display Failure Report
|
||||
# Run this ONLY if the link checker failed
|
||||
if: steps.lychee-check.outcome == 'failure'
|
||||
run: |
|
||||
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
|
||||
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
|
||||
echo "See [Link Checking and Fixing with Lychee](https://github.com/googleapis/genai-toolbox/blob/main/DEVELOPER.md#link-checking-and-fixing-with-lychee) for more details." >> $GITHUB_STEP_SUMMARY
|
||||
echo "---" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "### Broken Links Found" >> $GITHUB_STEP_SUMMARY
|
||||
cat ./lychee-report.md >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
exit 1
|
||||
|
||||
2
.github/workflows/publish-mcp.yml
vendored
2
.github/workflows/publish-mcp.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Wait for image in Artifact Registry
|
||||
shell: bash
|
||||
|
||||
@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
|
||||
# Add a new version block here before every release
|
||||
# The order of versions in this file is mirrored into the dropdown
|
||||
|
||||
[[params.versions]]
|
||||
version = "v0.27.0"
|
||||
url = "https://googleapis.github.io/genai-toolbox/v0.27.0/"
|
||||
|
||||
[[params.versions]]
|
||||
version = "v0.26.0"
|
||||
url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
|
||||
|
||||
@@ -23,8 +23,7 @@ https://cloud.dgraph.io/login
|
||||
https://dgraph.io/docs
|
||||
|
||||
# MySQL Community downloads and main site (often protected by bot mitigation)
|
||||
https://dev.mysql.com/downloads/installer/
|
||||
https://www.mysql.com/
|
||||
^https?://(.*\.)?mysql\.com/.*
|
||||
|
||||
# Claude desktop download link
|
||||
https://claude.ai/download
|
||||
@@ -37,9 +36,9 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
|
||||
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
|
||||
|
||||
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
|
||||
https://www.npmjs.com/package/@toolbox-sdk/core
|
||||
https://www.npmjs.com/package/@toolbox-sdk/adk
|
||||
^https?://(www\.)?npmjs\.com/.*
|
||||
|
||||
https://www.oceanbase.com/
|
||||
|
||||
# Ignore social media and blog profiles to reduce external request overhead
|
||||
https://medium.com/@mcp_toolbox
|
||||
https://medium.com/@mcp_toolbox
|
||||
26
CHANGELOG.md
26
CHANGELOG.md
@@ -1,5 +1,31 @@
|
||||
# Changelog
|
||||
|
||||
## [0.27.0](https://github.com/googleapis/genai-toolbox/compare/v0.26.0...v0.27.0) (2026-02-12)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* Update configuration file v2 ([#2369](https://github.com/googleapis/genai-toolbox/issues/2369))([293c1d6](https://github.com/googleapis/genai-toolbox/commit/293c1d6889c39807855ba5e01d4c13ba2a4c50ce))
|
||||
* Update/add detailed telemetry for mcp endpoint compliant with OTEL semantic convention ([#1987](https://github.com/googleapis/genai-toolbox/issues/1987)) ([478a0bd](https://github.com/googleapis/genai-toolbox/commit/478a0bdb59288c1213f83862f95a698b4c2c0aab))
|
||||
|
||||
### Features
|
||||
|
||||
* **cli/invoke:** Add support for direct tool invocation from CLI ([#2353](https://github.com/googleapis/genai-toolbox/issues/2353)) ([6e49ba4](https://github.com/googleapis/genai-toolbox/commit/6e49ba436ef2390c13feaf902b29f5907acffb57))
|
||||
* **cli/skills:** Add support for generating agent skills from toolset ([#2392](https://github.com/googleapis/genai-toolbox/issues/2392)) ([80ef346](https://github.com/googleapis/genai-toolbox/commit/80ef34621453b77bdf6a6016c354f102a17ada04))
|
||||
* **cloud-logging-admin:** Add source, tools, integration test and docs ([#2137](https://github.com/googleapis/genai-toolbox/issues/2137)) ([252fc30](https://github.com/googleapis/genai-toolbox/commit/252fc3091af10d25d8d7af7e047b5ac87a5dd041))
|
||||
* **cockroachdb:** Add CockroachDB integration with cockroach-go ([#2006](https://github.com/googleapis/genai-toolbox/issues/2006)) ([1fdd99a](https://github.com/googleapis/genai-toolbox/commit/1fdd99a9b609a5e906acce414226ff44d75d5975))
|
||||
* **prebuiltconfigs/alloydb-omni:** Implement Alloydb omni dataplane tools ([#2340](https://github.com/googleapis/genai-toolbox/issues/2340)) ([e995349](https://github.com/googleapis/genai-toolbox/commit/e995349ea0756c700d188b8f04e9459121219f0c))
|
||||
* **server:** Add Tool call error categories ([#2387](https://github.com/googleapis/genai-toolbox/issues/2387)) ([32cb4db](https://github.com/googleapis/genai-toolbox/commit/32cb4db712d27579c1bf29e61cbd0bed02286c28))
|
||||
* **tools/looker:** support `looker-validate-project` tool ([#2430](https://github.com/googleapis/genai-toolbox/issues/2430)) ([a15a128](https://github.com/googleapis/genai-toolbox/commit/a15a12873f936b0102aeb9500cc3bcd71bb38c34))
|
||||
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **dataplex:** Capture GCP HTTP errors in MCP Toolbox ([#2347](https://github.com/googleapis/genai-toolbox/issues/2347)) ([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15))
|
||||
* **sources/cockroachdb:** Update kind to type ([#2465](https://github.com/googleapis/genai-toolbox/issues/2465)) ([2d341ac](https://github.com/googleapis/genai-toolbox/commit/2d341acaa61c3c1fe908fceee8afbd90fb646d3a))
|
||||
* Surface Dataplex API errors in MCP results ([#2347](https://github.com/googleapis/genai-toolbox/pull/2347))([1d7c498](https://github.com/googleapis/genai-toolbox/commit/1d7c4981164c34b4d7bc8edecfd449f57ad11e15))
|
||||
|
||||
## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
|
||||
|
||||
|
||||
|
||||
14
README.md
14
README.md
@@ -142,7 +142,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.26.0
|
||||
> export VERSION=0.27.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -155,7 +155,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.26.0
|
||||
> export VERSION=0.27.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -168,7 +168,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```sh
|
||||
> # see releases page for other versions
|
||||
> export VERSION=0.26.0
|
||||
> export VERSION=0.27.0
|
||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||
> chmod +x toolbox
|
||||
> ```
|
||||
@@ -181,7 +181,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```cmd
|
||||
> :: see releases page for other versions
|
||||
> set VERSION=0.26.0
|
||||
> set VERSION=0.27.0
|
||||
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||
> ```
|
||||
>
|
||||
@@ -193,7 +193,7 @@ To install Toolbox as a binary:
|
||||
>
|
||||
> ```powershell
|
||||
> # see releases page for other versions
|
||||
> $VERSION = "0.26.0"
|
||||
> $VERSION = "0.27.0"
|
||||
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||
> ```
|
||||
>
|
||||
@@ -206,7 +206,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.26.0
|
||||
export VERSION=0.27.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -230,7 +230,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.26.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.27.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
257
cmd/internal/imports.go
Normal file
257
cmd/internal/imports.go
Normal file
@@ -0,0 +1,257 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package internal
|
||||
|
||||
import (
|
||||
// Import prompt packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblistschemas"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
|
||||
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cockroachdb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/oracle"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
||||
)
|
||||
@@ -18,37 +18,15 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// RootCommand defines the interface for required by invoke subcommand.
|
||||
// This allows subcommands to access shared resources and functionality without
|
||||
// direct coupling to the root command's implementation.
|
||||
type RootCommand interface {
|
||||
// Config returns a copy of the current server configuration.
|
||||
Config() server.ServerConfig
|
||||
|
||||
// Out returns the writer used for standard output.
|
||||
Out() io.Writer
|
||||
|
||||
// LoadConfig loads and merges the configuration from files, folders, and prebuilts.
|
||||
LoadConfig(ctx context.Context) error
|
||||
|
||||
// Setup initializes the runtime environment, including logging and telemetry.
|
||||
// It returns the updated context and a shutdown function to be called when finished.
|
||||
Setup(ctx context.Context) (context.Context, func(context.Context) error, error)
|
||||
|
||||
// Logger returns the logger instance.
|
||||
Logger() log.Logger
|
||||
}
|
||||
|
||||
func NewCommand(rootCmd RootCommand) *cobra.Command {
|
||||
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "invoke <tool-name> [params]",
|
||||
Short: "Execute a tool directly",
|
||||
@@ -58,17 +36,17 @@ Example:
|
||||
toolbox invoke my-tool '{"param1": "value1"}'`,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
RunE: func(c *cobra.Command, args []string) error {
|
||||
return runInvoke(c, args, rootCmd)
|
||||
return runInvoke(c, args, opts)
|
||||
},
|
||||
}
|
||||
return cmd
|
||||
}
|
||||
|
||||
func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
func runInvoke(cmd *cobra.Command, args []string, opts *internal.ToolboxOptions) error {
|
||||
ctx, cancel := context.WithCancel(cmd.Context())
|
||||
defer cancel()
|
||||
|
||||
ctx, shutdown, err := rootCmd.Setup(ctx)
|
||||
ctx, shutdown, err := opts.Setup(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -76,16 +54,16 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
_ = shutdown(ctx)
|
||||
}()
|
||||
|
||||
// Load and merge tool configurations
|
||||
if err := rootCmd.LoadConfig(ctx); err != nil {
|
||||
_, err = opts.LoadConfig(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize Resources
|
||||
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, rootCmd.Config())
|
||||
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("failed to initialize resources: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
@@ -96,7 +74,7 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
errMsg := fmt.Errorf("tool %q not found", toolName)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
@@ -109,7 +87,7 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
if paramsInput != "" {
|
||||
if err := json.Unmarshal([]byte(paramsInput), ¶ms); err != nil {
|
||||
errMsg := fmt.Errorf("params must be a valid JSON string: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
}
|
||||
@@ -117,14 +95,14 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("invalid parameters: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap())
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error embedding parameters: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
@@ -132,19 +110,19 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("failed to check auth requirements: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
if requiresAuth {
|
||||
errMsg := fmt.Errorf("client authorization is not supported")
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "")
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("tool execution failed: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
@@ -152,10 +130,10 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
|
||||
output, err := json.MarshalIndent(result, "", " ")
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("failed to marshal result: %w", err)
|
||||
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
|
||||
opts.Logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
fmt.Fprintln(rootCmd.Out(), string(output))
|
||||
fmt.Fprintln(opts.IOStreams.Out, string(output))
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -12,16 +12,38 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cmd
|
||||
package invoke
|
||||
|
||||
import (
|
||||
"context"
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func invokeCommand(args []string) (string, error) {
|
||||
parentCmd := &cobra.Command{Use: "toolbox"}
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
|
||||
internal.PersistentFlags(parentCmd, opts)
|
||||
|
||||
cmd := NewCommand(opts)
|
||||
parentCmd.AddCommand(cmd)
|
||||
parentCmd.SetArgs(args)
|
||||
|
||||
err := parentCmd.Execute()
|
||||
return buf.String(), err
|
||||
}
|
||||
|
||||
func TestInvokeTool(t *testing.T) {
|
||||
// Create a temporary tools file
|
||||
tmpDir := t.TempDir()
|
||||
@@ -86,7 +108,7 @@ tools:
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
_, got, err := invokeCommandWithContext(context.Background(), tc.args)
|
||||
got, err := invokeCommand(tc.args)
|
||||
if (err != nil) != tc.wantErr {
|
||||
t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
|
||||
}
|
||||
@@ -121,7 +143,7 @@ tools:
|
||||
}
|
||||
|
||||
args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath}
|
||||
_, _, err := invokeCommandWithContext(context.Background(), args)
|
||||
_, err := invokeCommand(args)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for tool requiring client auth, but got nil")
|
||||
}
|
||||
251
cmd/internal/options.go
Normal file
251
cmd/internal/options.go
Normal file
@@ -0,0 +1,251 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package internal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
)
|
||||
|
||||
type IOStreams struct {
|
||||
In io.Reader
|
||||
Out io.Writer
|
||||
ErrOut io.Writer
|
||||
}
|
||||
|
||||
// ToolboxOptions holds dependencies shared by all commands.
|
||||
type ToolboxOptions struct {
|
||||
IOStreams IOStreams
|
||||
Logger log.Logger
|
||||
Cfg server.ServerConfig
|
||||
ToolsFile string
|
||||
ToolsFiles []string
|
||||
ToolsFolder string
|
||||
PrebuiltConfigs []string
|
||||
}
|
||||
|
||||
// Option defines a function that modifies the ToolboxOptions struct.
|
||||
type Option func(*ToolboxOptions)
|
||||
|
||||
// NewToolboxOptions creates a new instance with defaults, then applies any
|
||||
// provided options.
|
||||
func NewToolboxOptions(opts ...Option) *ToolboxOptions {
|
||||
o := &ToolboxOptions{
|
||||
IOStreams: IOStreams{
|
||||
In: os.Stdin,
|
||||
Out: os.Stdout,
|
||||
ErrOut: os.Stderr,
|
||||
},
|
||||
}
|
||||
|
||||
for _, opt := range opts {
|
||||
opt(o)
|
||||
}
|
||||
return o
|
||||
}
|
||||
|
||||
// Apply allows you to update an EXISTING ToolboxOptions instance.
|
||||
// This is useful for "late binding".
|
||||
func (o *ToolboxOptions) Apply(opts ...Option) {
|
||||
for _, opt := range opts {
|
||||
opt(o)
|
||||
}
|
||||
}
|
||||
|
||||
// WithIOStreams updates the IO streams.
|
||||
func WithIOStreams(out, err io.Writer) Option {
|
||||
return func(o *ToolboxOptions) {
|
||||
o.IOStreams.Out = out
|
||||
o.IOStreams.ErrOut = err
|
||||
}
|
||||
}
|
||||
|
||||
// Setup create logger and telemetry instrumentations.
|
||||
func (opts *ToolboxOptions) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) {
|
||||
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to
|
||||
// errStream
|
||||
loggerOut := opts.IOStreams.Out
|
||||
if opts.Cfg.Stdio {
|
||||
loggerOut = opts.IOStreams.ErrOut
|
||||
}
|
||||
|
||||
// Handle logger separately from config
|
||||
logger, err := log.NewLogger(opts.Cfg.LoggingFormat.String(), opts.Cfg.LogLevel.String(), loggerOut, opts.IOStreams.ErrOut)
|
||||
if err != nil {
|
||||
return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err)
|
||||
}
|
||||
|
||||
ctx = util.WithLogger(ctx, logger)
|
||||
opts.Logger = logger
|
||||
|
||||
// Set up OpenTelemetry
|
||||
otelShutdown, err := telemetry.SetupOTel(ctx, opts.Cfg.Version, opts.Cfg.TelemetryOTLP, opts.Cfg.TelemetryGCP, opts.Cfg.TelemetryServiceName)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return ctx, nil, errMsg
|
||||
}
|
||||
|
||||
shutdownFunc := func(ctx context.Context) error {
|
||||
err := otelShutdown(ctx)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(opts.Cfg.Version)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return ctx, shutdownFunc, errMsg
|
||||
}
|
||||
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
return ctx, shutdownFunc, nil
|
||||
}
|
||||
|
||||
// LoadConfig checks and merge files that should be loaded into the server
|
||||
func (opts *ToolboxOptions) LoadConfig(ctx context.Context) (bool, error) {
|
||||
// Determine if Custom Files should be loaded
|
||||
// Check for explicit custom flags
|
||||
isCustomConfigured := opts.ToolsFile != "" || len(opts.ToolsFiles) > 0 || opts.ToolsFolder != ""
|
||||
|
||||
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
|
||||
useDefaultToolsFile := len(opts.PrebuiltConfigs) == 0 && !isCustomConfigured
|
||||
|
||||
if useDefaultToolsFile {
|
||||
opts.ToolsFile = "tools.yaml"
|
||||
isCustomConfigured = true
|
||||
}
|
||||
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return isCustomConfigured, err
|
||||
}
|
||||
|
||||
var allToolsFiles []ToolsFile
|
||||
|
||||
// Load Prebuilt Configuration
|
||||
|
||||
if len(opts.PrebuiltConfigs) > 0 {
|
||||
slices.Sort(opts.PrebuiltConfigs)
|
||||
sourcesList := strings.Join(opts.PrebuiltConfigs, ", ")
|
||||
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
|
||||
logger.InfoContext(ctx, logMsg)
|
||||
|
||||
for _, configName := range opts.PrebuiltConfigs {
|
||||
buf, err := prebuiltconfigs.Get(configName)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, err.Error())
|
||||
return isCustomConfigured, err
|
||||
}
|
||||
|
||||
// Parse into ToolsFile struct
|
||||
parsed, err := parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return isCustomConfigured, errMsg
|
||||
}
|
||||
allToolsFiles = append(allToolsFiles, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
// Load Custom Configurations
|
||||
if isCustomConfigured {
|
||||
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
|
||||
if (opts.ToolsFile != "" && len(opts.ToolsFiles) > 0) ||
|
||||
(opts.ToolsFile != "" && opts.ToolsFolder != "") ||
|
||||
(len(opts.ToolsFiles) > 0 && opts.ToolsFolder != "") {
|
||||
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return isCustomConfigured, errMsg
|
||||
}
|
||||
|
||||
var customTools ToolsFile
|
||||
var err error
|
||||
|
||||
if len(opts.ToolsFiles) > 0 {
|
||||
// Use tools-files
|
||||
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(opts.ToolsFiles)))
|
||||
customTools, err = LoadAndMergeToolsFiles(ctx, opts.ToolsFiles)
|
||||
} else if opts.ToolsFolder != "" {
|
||||
// Use tools-folder
|
||||
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", opts.ToolsFolder))
|
||||
customTools, err = LoadAndMergeToolsFolder(ctx, opts.ToolsFolder)
|
||||
} else {
|
||||
// Use single file (tools-file or default `tools.yaml`)
|
||||
buf, readFileErr := os.ReadFile(opts.ToolsFile)
|
||||
if readFileErr != nil {
|
||||
errMsg := fmt.Errorf("unable to read tool file at %q: %w", opts.ToolsFile, readFileErr)
|
||||
logger.ErrorContext(ctx, errMsg.Error())
|
||||
return isCustomConfigured, errMsg
|
||||
}
|
||||
customTools, err = parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("unable to parse tool file at %q: %w", opts.ToolsFile, err)
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, err.Error())
|
||||
return isCustomConfigured, err
|
||||
}
|
||||
allToolsFiles = append(allToolsFiles, customTools)
|
||||
}
|
||||
|
||||
// Modify version string based on loaded configurations
|
||||
if len(opts.PrebuiltConfigs) > 0 {
|
||||
tag := "prebuilt"
|
||||
if isCustomConfigured {
|
||||
tag = "custom"
|
||||
}
|
||||
// prebuiltConfigs is already sorted above
|
||||
for _, configName := range opts.PrebuiltConfigs {
|
||||
opts.Cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
|
||||
}
|
||||
}
|
||||
|
||||
// Merge Everything
|
||||
// This will error if custom tools collide with prebuilt tools
|
||||
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, err.Error())
|
||||
return isCustomConfigured, err
|
||||
}
|
||||
|
||||
opts.Cfg.SourceConfigs = finalToolsFile.Sources
|
||||
opts.Cfg.AuthServiceConfigs = finalToolsFile.AuthServices
|
||||
opts.Cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
|
||||
opts.Cfg.ToolConfigs = finalToolsFile.Tools
|
||||
opts.Cfg.ToolsetConfigs = finalToolsFile.Toolsets
|
||||
opts.Cfg.PromptConfigs = finalToolsFile.Prompts
|
||||
|
||||
return isCustomConfigured, nil
|
||||
}
|
||||
@@ -12,57 +12,38 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cmd
|
||||
package internal
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func TestCommandOptions(t *testing.T) {
|
||||
func TestToolboxOptions(t *testing.T) {
|
||||
w := io.Discard
|
||||
tcs := []struct {
|
||||
desc string
|
||||
isValid func(*Command) error
|
||||
isValid func(*ToolboxOptions) error
|
||||
option Option
|
||||
}{
|
||||
{
|
||||
desc: "with logger",
|
||||
isValid: func(c *Command) error {
|
||||
if c.outStream != w || c.errStream != w {
|
||||
isValid: func(o *ToolboxOptions) error {
|
||||
if o.IOStreams.Out != w || o.IOStreams.ErrOut != w {
|
||||
return errors.New("loggers do not match")
|
||||
}
|
||||
return nil
|
||||
},
|
||||
option: WithStreams(w, w),
|
||||
option: WithIOStreams(w, w),
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got, err := invokeProxyWithOption(tc.option)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
got := NewToolboxOptions(tc.option)
|
||||
if err := tc.isValid(got); err != nil {
|
||||
t.Errorf("option did not initialize command correctly: %v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func invokeProxyWithOption(o Option) (*Command, error) {
|
||||
c := NewCommand(o)
|
||||
// Keep the test output quiet
|
||||
c.SilenceUsage = true
|
||||
c.SilenceErrors = true
|
||||
// Disable execute behavior
|
||||
c.RunE = func(*cobra.Command, []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
err := c.Execute()
|
||||
return c, err
|
||||
}
|
||||
46
cmd/internal/persistent_flags.go
Normal file
46
cmd/internal/persistent_flags.go
Normal file
@@ -0,0 +1,46 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package internal
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// PersistentFlags sets up flags that are available for all commands and
|
||||
// subcommands
|
||||
// It is also used to set up persistent flags during subcommand unit tests
|
||||
func PersistentFlags(parentCmd *cobra.Command, opts *ToolboxOptions) {
|
||||
persistentFlags := parentCmd.PersistentFlags()
|
||||
|
||||
persistentFlags.StringVar(&opts.ToolsFile, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
|
||||
persistentFlags.StringSliceVar(&opts.ToolsFiles, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
|
||||
persistentFlags.StringVar(&opts.ToolsFolder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
|
||||
persistentFlags.Var(&opts.Cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
|
||||
persistentFlags.Var(&opts.Cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
|
||||
persistentFlags.BoolVar(&opts.Cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
||||
persistentFlags.StringVar(&opts.Cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
|
||||
persistentFlags.StringVar(&opts.Cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||
// Fetch prebuilt tools sources to customize the help description
|
||||
prebuiltHelp := fmt.Sprintf(
|
||||
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
|
||||
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
||||
)
|
||||
persistentFlags.StringSliceVar(&opts.PrebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
|
||||
persistentFlags.StringSliceVar(&opts.Cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
|
||||
}
|
||||
214
cmd/internal/skills/command.go
Normal file
214
cmd/internal/skills/command.go
Normal file
@@ -0,0 +1,214 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package skills
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// skillsCmd is the command for generating skills.
|
||||
type skillsCmd struct {
|
||||
*cobra.Command
|
||||
name string
|
||||
description string
|
||||
toolset string
|
||||
outputDir string
|
||||
}
|
||||
|
||||
// NewCommand creates a new Command.
|
||||
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
|
||||
cmd := &skillsCmd{}
|
||||
cmd.Command = &cobra.Command{
|
||||
Use: "skills-generate",
|
||||
Short: "Generate skills from tool configurations",
|
||||
RunE: func(c *cobra.Command, args []string) error {
|
||||
return run(cmd, opts)
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Flags().StringVar(&cmd.name, "name", "", "Name of the generated skill.")
|
||||
cmd.Flags().StringVar(&cmd.description, "description", "", "Description of the generated skill")
|
||||
cmd.Flags().StringVar(&cmd.toolset, "toolset", "", "Name of the toolset to convert into a skill. If not provided, all tools will be included.")
|
||||
cmd.Flags().StringVar(&cmd.outputDir, "output-dir", "skills", "Directory to output generated skills")
|
||||
|
||||
_ = cmd.MarkFlagRequired("name")
|
||||
_ = cmd.MarkFlagRequired("description")
|
||||
return cmd.Command
|
||||
}
|
||||
|
||||
// run executes the skills-generate command. It loads the tool
// configuration, resolves the set of tools to include, and writes one
// skill directory under outputDir containing:
//   - SKILL.md            (frontmatter plus per-tool reference docs)
//   - assets/<tool>.yaml  (minimal toolbox config per tool)
//   - scripts/<tool>.js   (Node.js wrapper that invokes the tool)
func run(cmd *skillsCmd, opts *internal.ToolboxOptions) error {
	ctx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	ctx, shutdown, err := opts.Setup(ctx)
	if err != nil {
		return err
	}
	defer func() {
		// Best-effort cleanup; a shutdown failure does not change the result.
		_ = shutdown(ctx)
	}()

	_, err = opts.LoadConfig(ctx)
	if err != nil {
		return err
	}

	if err := os.MkdirAll(cmd.outputDir, 0755); err != nil {
		errMsg := fmt.Errorf("error creating output directory: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	opts.Logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", cmd.name))

	// Initialize toolbox and collect tools
	allTools, err := cmd.collectTools(ctx, opts)
	if err != nil {
		errMsg := fmt.Errorf("error collecting tools: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	// Nothing to do is not an error: log and exit cleanly.
	if len(allTools) == 0 {
		opts.Logger.InfoContext(ctx, "No tools found to generate.")
		return nil
	}

	// Generate the combined skill directory
	skillPath := filepath.Join(cmd.outputDir, cmd.name)
	if err := os.MkdirAll(skillPath, 0755); err != nil {
		errMsg := fmt.Errorf("error creating skill directory: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	// Generate assets directory
	assetsPath := filepath.Join(skillPath, "assets")
	if err := os.MkdirAll(assetsPath, 0755); err != nil {
		errMsg := fmt.Errorf("error creating assets dir: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	// Generate scripts directory
	scriptsPath := filepath.Join(skillPath, "scripts")
	if err := os.MkdirAll(scriptsPath, 0755); err != nil {
		errMsg := fmt.Errorf("error creating scripts dir: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	// Iterate over keys to ensure deterministic order
	var toolNames []string
	for name := range allTools {
		toolNames = append(toolNames, name)
	}
	sort.Strings(toolNames)

	for _, toolName := range toolNames {
		// Generate YAML config in asset directory
		minimizedContent, err := generateToolConfigYAML(opts.Cfg, toolName)
		if err != nil {
			errMsg := fmt.Errorf("error generating filtered config for %s: %w", toolName, err)
			opts.Logger.ErrorContext(ctx, errMsg.Error())
			return errMsg
		}

		specificToolsFileName := fmt.Sprintf("%s.yaml", toolName)
		// nil content means there is no config asset to write for this tool.
		if minimizedContent != nil {
			destPath := filepath.Join(assetsPath, specificToolsFileName)
			if err := os.WriteFile(destPath, minimizedContent, 0644); err != nil {
				errMsg := fmt.Errorf("error writing filtered config for %s: %w", toolName, err)
				opts.Logger.ErrorContext(ctx, errMsg.Error())
				return errMsg
			}
		}

		// Generate wrapper script in scripts directory
		scriptContent, err := generateScriptContent(toolName, specificToolsFileName)
		if err != nil {
			errMsg := fmt.Errorf("error generating script content for %s: %w", toolName, err)
			opts.Logger.ErrorContext(ctx, errMsg.Error())
			return errMsg
		}

		// 0755 so the wrapper is directly executable on POSIX systems.
		scriptFilename := filepath.Join(scriptsPath, fmt.Sprintf("%s.js", toolName))
		if err := os.WriteFile(scriptFilename, []byte(scriptContent), 0755); err != nil {
			errMsg := fmt.Errorf("error writing script %s: %w", scriptFilename, err)
			opts.Logger.ErrorContext(ctx, errMsg.Error())
			return errMsg
		}
	}

	// Generate SKILL.md
	skillContent, err := generateSkillMarkdown(cmd.name, cmd.description, allTools)
	if err != nil {
		errMsg := fmt.Errorf("error generating SKILL.md content: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}
	skillMdPath := filepath.Join(skillPath, "SKILL.md")
	if err := os.WriteFile(skillMdPath, []byte(skillContent), 0644); err != nil {
		errMsg := fmt.Errorf("error writing SKILL.md: %w", err)
		opts.Logger.ErrorContext(ctx, errMsg.Error())
		return errMsg
	}

	opts.Logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", cmd.name, len(allTools)))

	return nil
}
|
||||
|
||||
func (c *skillsCmd) collectTools(ctx context.Context, opts *internal.ToolboxOptions) (map[string]tools.Tool, error) {
|
||||
// Initialize Resources
|
||||
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to initialize resources: %w", err)
|
||||
}
|
||||
|
||||
resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
|
||||
|
||||
result := make(map[string]tools.Tool)
|
||||
|
||||
if c.toolset == "" {
|
||||
return toolsMap, nil
|
||||
}
|
||||
|
||||
ts, ok := resourceMgr.GetToolset(c.toolset)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("toolset %q not found", c.toolset)
|
||||
}
|
||||
|
||||
for _, t := range ts.Tools {
|
||||
if t != nil {
|
||||
tool := *t
|
||||
result[tool.McpManifest().Name] = tool
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
195
cmd/internal/skills/command_test.go
Normal file
195
cmd/internal/skills/command_test.go
Normal file
@@ -0,0 +1,195 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package skills
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/cmd/internal"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// invokeCommand runs the skills-generate command under a parent "toolbox"
// command with the given CLI args, capturing combined stdout/stderr. It
// returns the captured output and the execution error, if any.
func invokeCommand(args []string) (string, error) {
	parentCmd := &cobra.Command{Use: "toolbox"}

	// Route both output streams into one buffer so tests can inspect it.
	buf := new(bytes.Buffer)
	opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
	internal.PersistentFlags(parentCmd, opts)

	cmd := NewCommand(opts)
	parentCmd.AddCommand(cmd)
	parentCmd.SetArgs(args)

	err := parentCmd.Execute()
	return buf.String(), err
}
|
||||
|
||||
// TestGenerateSkill runs the full skills-generate flow against a real
// tools.yaml (one sqlite source + one sqlite-sql tool) and verifies the
// generated layout: SKILL.md, scripts/<tool>.js, and assets/<tool>.yaml.
func TestGenerateSkill(t *testing.T) {
	// Create a temporary directory for tests
	tmpDir := t.TempDir()
	outputDir := filepath.Join(tmpDir, "skills")

	// Create a tools.yaml file with a sqlite tool
	toolsFileContent := `
sources:
  my-sqlite:
    kind: sqlite
    database: test.db
tools:
  hello-sqlite:
    kind: sqlite-sql
    source: my-sqlite
    description: "hello tool"
    statement: "SELECT 'hello' as greeting"
`

	toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
	if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
		t.Fatalf("failed to write tools file: %v", err)
	}

	args := []string{
		"skills-generate",
		"--tools-file", toolsFilePath,
		"--output-dir", outputDir,
		"--name", "hello-sqlite",
		"--description", "hello tool",
	}

	got, err := invokeCommand(args)
	if err != nil {
		t.Fatalf("command failed: %v\nOutput: %s", err, got)
	}

	// Verify generated directory structure
	skillPath := filepath.Join(outputDir, "hello-sqlite")
	if _, err := os.Stat(skillPath); os.IsNotExist(err) {
		t.Fatalf("skill directory not created: %s", skillPath)
	}

	// Check SKILL.md
	skillMarkdown := filepath.Join(skillPath, "SKILL.md")
	content, err := os.ReadFile(skillMarkdown)
	if err != nil {
		t.Fatalf("failed to read SKILL.md: %v", err)
	}

	expectedFrontmatter := `---
name: hello-sqlite
description: hello tool
---`
	if !strings.HasPrefix(string(content), expectedFrontmatter) {
		t.Errorf("SKILL.md does not have expected frontmatter format.\nExpected prefix:\n%s\nGot:\n%s", expectedFrontmatter, string(content))
	}

	if !strings.Contains(string(content), "## Usage") {
		t.Errorf("SKILL.md does not contain '## Usage' section")
	}

	if !strings.Contains(string(content), "## Scripts") {
		t.Errorf("SKILL.md does not contain '## Scripts' section")
	}

	if !strings.Contains(string(content), "### hello-sqlite") {
		t.Errorf("SKILL.md does not contain '### hello-sqlite' tool header")
	}

	// Check script file
	scriptFilename := "hello-sqlite.js"
	scriptPath := filepath.Join(skillPath, "scripts", scriptFilename)
	if _, err := os.Stat(scriptPath); os.IsNotExist(err) {
		t.Fatalf("script file not created: %s", scriptPath)
	}

	scriptContent, err := os.ReadFile(scriptPath)
	if err != nil {
		t.Fatalf("failed to read script file: %v", err)
	}
	if !strings.Contains(string(scriptContent), "hello-sqlite") {
		t.Errorf("script file does not contain expected tool name")
	}

	// Check assets
	assetPath := filepath.Join(skillPath, "assets", "hello-sqlite.yaml")
	if _, err := os.Stat(assetPath); os.IsNotExist(err) {
		t.Fatalf("asset file not created: %s", assetPath)
	}
	assetContent, err := os.ReadFile(assetPath)
	if err != nil {
		t.Fatalf("failed to read asset file: %v", err)
	}
	if !strings.Contains(string(assetContent), "hello-sqlite") {
		t.Errorf("asset file does not contain expected tool name")
	}
}
|
||||
|
||||
// TestGenerateSkill_NoConfig verifies that skills-generate fails when no
// tools configuration is available, and that it does not create the
// output directory on that failure path.
func TestGenerateSkill_NoConfig(t *testing.T) {
	tmpDir := t.TempDir()
	outputDir := filepath.Join(tmpDir, "skills")

	args := []string{
		"skills-generate",
		"--output-dir", outputDir,
		"--name", "test",
		"--description", "test",
	}

	_, err := invokeCommand(args)
	if err == nil {
		t.Fatal("expected command to fail when no configuration is provided and tools.yaml is missing")
	}

	// Should not have created the directory if no config was processed
	if _, err := os.Stat(outputDir); !os.IsNotExist(err) {
		t.Errorf("output directory should not have been created")
	}
}
|
||||
|
||||
// TestGenerateSkill_MissingArguments verifies that the required --name and
// --description flags are enforced by cobra before any generation runs.
func TestGenerateSkill_MissingArguments(t *testing.T) {
	tmpDir := t.TempDir()
	toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
	if err := os.WriteFile(toolsFilePath, []byte("tools: {}"), 0644); err != nil {
		t.Fatalf("failed to write tools file: %v", err)
	}

	tests := []struct {
		name string
		args []string
	}{
		{
			name: "missing name",
			args: []string{"skills-generate", "--tools-file", toolsFilePath, "--description", "test"},
		},
		{
			name: "missing description",
			args: []string{"skills-generate", "--tools-file", toolsFilePath, "--name", "test"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := invokeCommand(tt.args)
			if err == nil {
				t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got)
			}
		})
	}
}
|
||||
296
cmd/internal/skills/generator.go
Normal file
296
cmd/internal/skills/generator.go
Normal file
@@ -0,0 +1,296 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package skills
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
)
|
||||
|
||||
// skillTemplate is the text/template used to render SKILL.md: YAML
// frontmatter, usage instructions, and one section per tool. Backtick
// characters are spliced in via string concatenation because raw string
// literals cannot contain them.
const skillTemplate = `---
name: {{.SkillName}}
description: {{.SkillDescription}}
---

## Usage

All scripts can be executed using Node.js. Replace ` + "`" + `<param_name>` + "`" + ` and ` + "`" + `<param_value>` + "`" + ` with actual values.

**Bash:**
` + "`" + `node scripts/<script_name>.js '{"<param_name>": "<param_value>"}'` + "`" + `

**PowerShell:**
` + "`" + `node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'` + "`" + `

## Scripts

{{range .Tools}}
### {{.Name}}

{{.Description}}

{{.ParametersSchema}}

---
{{end}}
`

// toolTemplateData holds the per-tool fields rendered in skillTemplate.
type toolTemplateData struct {
	Name             string
	Description      string
	ParametersSchema string
}

// skillTemplateData is the root data passed to skillTemplate.
type skillTemplateData struct {
	SkillName        string
	SkillDescription string
	Tools            []toolTemplateData
}
|
||||
|
||||
// generateSkillMarkdown generates the content of the SKILL.md file.
|
||||
// It includes usage instructions and a reference section for each tool in the skill,
|
||||
// detailing its description and parameters.
|
||||
func generateSkillMarkdown(skillName, skillDescription string, toolsMap map[string]tools.Tool) (string, error) {
|
||||
var toolsData []toolTemplateData
|
||||
|
||||
// Order tools based on name
|
||||
var toolNames []string
|
||||
for name := range toolsMap {
|
||||
toolNames = append(toolNames, name)
|
||||
}
|
||||
sort.Strings(toolNames)
|
||||
|
||||
for _, name := range toolNames {
|
||||
tool := toolsMap[name]
|
||||
manifest := tool.Manifest()
|
||||
|
||||
parametersSchema, err := formatParameters(manifest.Parameters)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
toolsData = append(toolsData, toolTemplateData{
|
||||
Name: name,
|
||||
Description: manifest.Description,
|
||||
ParametersSchema: parametersSchema,
|
||||
})
|
||||
}
|
||||
|
||||
data := skillTemplateData{
|
||||
SkillName: skillName,
|
||||
SkillDescription: skillDescription,
|
||||
Tools: toolsData,
|
||||
}
|
||||
|
||||
tmpl, err := template.New("markdown").Parse(skillTemplate)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error parsing markdown template: %w", err)
|
||||
}
|
||||
|
||||
var buf strings.Builder
|
||||
if err := tmpl.Execute(&buf, data); err != nil {
|
||||
return "", fmt.Errorf("error executing markdown template: %w", err)
|
||||
}
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// nodeScriptTemplate is the text/template for the per-tool Node.js wrapper
// script. The rendered script locates a toolbox binary (first on PATH,
// then a ../../../toolbox file relative to the script) and spawns
// `toolbox invoke <tool>` pointing at the generated assets config file.
const nodeScriptTemplate = `#!/usr/bin/env node

const { spawn, execSync } = require('child_process');
const path = require('path');
const fs = require('fs');

const toolName = "{{.Name}}";
const toolsFileName = "{{.ToolsFileName}}";

function getToolboxPath() {
  try {
    const checkCommand = process.platform === 'win32' ? 'where toolbox' : 'which toolbox';
    const globalPath = execSync(checkCommand, { stdio: 'pipe', encoding: 'utf-8' }).trim();
    if (globalPath) {
      return globalPath.split('\n')[0].trim();
    }
  } catch (e) {
    // Ignore error;
  }
  const localPath = path.resolve(__dirname, '../../../toolbox');
  if (fs.existsSync(localPath)) {
    return localPath;
  }
  throw new Error("Toolbox binary not found");
}

let toolboxBinary;
try {
  toolboxBinary = getToolboxPath();
} catch (err) {
  console.error("Error:", err.message);
  process.exit(1);
}

let configArgs = [];
if (toolsFileName) {
  configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));
}

const args = process.argv.slice(2);
const toolboxArgs = [...configArgs, "invoke", toolName, ...args];

const child = spawn(toolboxBinary, toolboxArgs, { stdio: 'inherit' });

child.on('close', (code) => {
  process.exit(code);
});

child.on('error', (err) => {
  console.error("Error executing toolbox:", err);
  process.exit(1);
});
`

// scriptData is the data rendered into nodeScriptTemplate.
type scriptData struct {
	Name          string
	ToolsFileName string
}
|
||||
|
||||
// generateScriptContent creates the content for a Node.js wrapper script.
|
||||
// This script invokes the toolbox CLI with the appropriate configuration
|
||||
// (using a generated tools file) and arguments to execute the specific tool.
|
||||
func generateScriptContent(name string, toolsFileName string) (string, error) {
|
||||
data := scriptData{
|
||||
Name: name,
|
||||
ToolsFileName: toolsFileName,
|
||||
}
|
||||
|
||||
tmpl, err := template.New("script").Parse(nodeScriptTemplate)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error parsing script template: %w", err)
|
||||
}
|
||||
|
||||
var buf strings.Builder
|
||||
if err := tmpl.Execute(&buf, data); err != nil {
|
||||
return "", fmt.Errorf("error executing script template: %w", err)
|
||||
}
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// formatParameters converts a list of parameter manifests into a formatted JSON schema string.
|
||||
// This schema is used in the skill documentation to describe the input parameters for a tool.
|
||||
func formatParameters(params []parameters.ParameterManifest) (string, error) {
|
||||
if len(params) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
properties := make(map[string]interface{})
|
||||
var required []string
|
||||
|
||||
for _, p := range params {
|
||||
paramMap := map[string]interface{}{
|
||||
"type": p.Type,
|
||||
"description": p.Description,
|
||||
}
|
||||
if p.Default != nil {
|
||||
paramMap["default"] = p.Default
|
||||
}
|
||||
properties[p.Name] = paramMap
|
||||
if p.Required {
|
||||
required = append(required, p.Name)
|
||||
}
|
||||
}
|
||||
|
||||
schema := map[string]interface{}{
|
||||
"type": "object",
|
||||
"properties": properties,
|
||||
}
|
||||
if len(required) > 0 {
|
||||
schema["required"] = required
|
||||
}
|
||||
|
||||
schemaJSON, err := json.MarshalIndent(schema, "", " ")
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error generating parameters schema: %w", err)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("#### Parameters\n\n```json\n%s\n```", string(schemaJSON)), nil
|
||||
}
|
||||
|
||||
// generateToolConfigYAML generates the YAML configuration for a single tool and its dependency (source).
// It extracts the relevant tool and source configurations from the server config and formats them
// into a YAML document suitable for inclusion in the skill's assets. The tool
// and (when present) its source are written as separate documents via the
// same encoder, which separates them with "---".
func generateToolConfigYAML(cfg server.ServerConfig, toolName string) ([]byte, error) {
	toolCfg, ok := cfg.ToolConfigs[toolName]
	if !ok {
		return nil, fmt.Errorf("error finding tool config: %s", toolName)
	}

	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)

	// Process Tool Config
	toolWrapper := struct {
		Kind   string           `yaml:"kind"`
		Config tools.ToolConfig `yaml:",inline"`
	}{
		Kind:   "tools",
		Config: toolCfg,
	}

	if err := encoder.Encode(toolWrapper); err != nil {
		return nil, fmt.Errorf("error encoding tool config: %w", err)
	}

	// Process Source Config
	// Round-trip the tool config through YAML to a generic map so the
	// "source" reference can be read without knowing the concrete type.
	var toolMap map[string]interface{}
	b, err := yaml.Marshal(toolCfg)
	if err != nil {
		return nil, fmt.Errorf("error marshaling tool config: %w", err)
	}
	if err := yaml.Unmarshal(b, &toolMap); err != nil {
		return nil, fmt.Errorf("error unmarshaling tool config map: %w", err)
	}

	// Tools without a "source" field (or with an empty one) simply get no
	// sources document appended.
	if sourceName, ok := toolMap["source"].(string); ok && sourceName != "" {
		sourceCfg, ok := cfg.SourceConfigs[sourceName]
		if !ok {
			return nil, fmt.Errorf("error finding source config: %s", sourceName)
		}

		sourceWrapper := struct {
			Kind   string               `yaml:"kind"`
			Config sources.SourceConfig `yaml:",inline"`
		}{
			Kind:   "sources",
			Config: sourceCfg,
		}

		if err := encoder.Encode(sourceWrapper); err != nil {
			return nil, fmt.Errorf("error encoding source config: %w", err)
		}
	}

	return buf.Bytes(), nil
}
|
||||
347
cmd/internal/skills/generator_test.go
Normal file
347
cmd/internal/skills/generator_test.go
Normal file
@@ -0,0 +1,347 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package skills
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// MockToolConfig is a minimal tools.ToolConfig implementation used to
// exercise generateToolConfigYAML without a real tool kind.
type MockToolConfig struct {
	Name       string                `yaml:"name"`
	Type       string                `yaml:"type"`
	Source     string                `yaml:"source"`
	Other      string                `yaml:"other"`
	Parameters parameters.Parameters `yaml:"parameters"`
}

// ToolConfigType returns the mock's configured type string.
func (m MockToolConfig) ToolConfigType() string {
	return m.Type
}

// Initialize is a no-op; these tests never instantiate a real tool.
func (m MockToolConfig) Initialize(map[string]sources.Source) (tools.Tool, error) {
	return nil, nil
}
|
||||
|
||||
// MockSourceConfig is a minimal sources.SourceConfig implementation used
// to exercise generateToolConfigYAML's source lookup path.
type MockSourceConfig struct {
	Name             string `yaml:"name"`
	Type             string `yaml:"type"`
	ConnectionString string `yaml:"connection_string"`
}

// SourceConfigType returns the mock's configured type string.
func (m MockSourceConfig) SourceConfigType() string {
	return m.Type
}

// Initialize is a no-op; these tests never connect to a real source.
func (m MockSourceConfig) Initialize(context.Context, trace.Tracer) (sources.Source, error) {
	return nil, nil
}
|
||||
|
||||
// TestFormatParameters checks the rendered JSON-schema block for empty,
// required-only, and mixed (default-carrying) parameter lists.
func TestFormatParameters(t *testing.T) {
	tests := []struct {
		name         string
		params       []parameters.ParameterManifest
		wantContains []string
		wantErr      bool
	}{
		{
			name:         "empty parameters",
			params:       []parameters.ParameterManifest{},
			wantContains: []string{""},
		},
		{
			name: "single required string parameter",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "A test parameter",
					Type:        "string",
					Required:    true,
				},
			},
			wantContains: []string{
				// Substring match: the "#### Parameters" heading emitted by
				// formatParameters contains "## Parameters".
				"## Parameters",
				"```json",
				`"type": "object"`,
				`"properties": {`,
				`"param1": {`,
				`"type": "string"`,
				`"description": "A test parameter"`,
				`"required": [`,
				`"param1"`,
			},
		},
		{
			name: "mixed parameters with defaults",
			params: []parameters.ParameterManifest{
				{
					Name:        "param1",
					Description: "Param 1",
					Type:        "string",
					Required:    true,
				},
				{
					Name:        "param2",
					Description: "Param 2",
					Type:        "integer",
					Default:     42,
					Required:    false,
				},
			},
			wantContains: []string{
				`"param1": {`,
				`"param2": {`,
				`"default": 42`,
				`"required": [`,
				`"param1"`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := formatParameters(tt.params)
			if (err != nil) != tt.wantErr {
				t.Errorf("formatParameters() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			// Empty input must produce the empty string, not an empty schema.
			if len(tt.params) == 0 {
				if got != "" {
					t.Errorf("formatParameters() = %v, want empty string", got)
				}
				return
			}

			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("formatParameters() result missing expected string: %s\nGot:\n%s", want, got)
				}
			}
		})
	}
}
|
||||
|
||||
// TestGenerateSkillMarkdown verifies that a one-tool skill renders
// frontmatter, the usage/scripts sections, and the tool's reference entry.
func TestGenerateSkillMarkdown(t *testing.T) {
	toolsMap := map[string]tools.Tool{
		"tool1": server.MockTool{
			Description: "First tool",
			Params: []parameters.Parameter{
				parameters.NewStringParameter("p1", "d1"),
			},
		},
	}

	got, err := generateSkillMarkdown("MySkill", "My Description", toolsMap)
	if err != nil {
		t.Fatalf("generateSkillMarkdown() error = %v", err)
	}

	expectedSubstrings := []string{
		"name: MySkill",
		"description: My Description",
		"## Usage",
		"All scripts can be executed using Node.js",
		"**Bash:**",
		"`node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'`",
		"**PowerShell:**",
		"`node scripts/<script_name>.js '{\"<param_name>\": \"<param_value>\"}'`",
		"## Scripts",
		"### tool1",
		"First tool",
		"## Parameters",
	}

	for _, s := range expectedSubstrings {
		if !strings.Contains(got, s) {
			t.Errorf("generateSkillMarkdown() missing substring %q", s)
		}
	}
}
|
||||
|
||||
// TestGenerateScriptContent checks that the rendered Node.js wrapper
// embeds the tool name and, when given, the tools file reference.
func TestGenerateScriptContent(t *testing.T) {
	tests := []struct {
		name          string
		toolName      string
		toolsFileName string
		wantContains  []string
	}{
		{
			name:          "basic script",
			toolName:      "test-tool",
			toolsFileName: "",
			wantContains: []string{
				`const toolName = "test-tool";`,
				`const toolsFileName = "";`,
				`const toolboxArgs = [...configArgs, "invoke", toolName, ...args];`,
			},
		},
		{
			name:          "script with tools file",
			toolName:      "complex-tool",
			toolsFileName: "tools.yaml",
			wantContains: []string{
				`const toolName = "complex-tool";`,
				`const toolsFileName = "tools.yaml";`,
				`configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));`,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := generateScriptContent(tt.toolName, tt.toolsFileName)
			if err != nil {
				t.Fatalf("generateScriptContent() error = %v", err)
			}

			for _, s := range tt.wantContains {
				if !strings.Contains(got, s) {
					t.Errorf("generateScriptContent() missing substring %q\nGot:\n%s", s, got)
				}
			}
		})
	}
}
|
||||
|
||||
// TestGenerateToolConfigYAML covers the four config-extraction paths:
// tool+source, tool without a source, tool with parameters, and the two
// error cases (unknown tool, dangling source reference).
func TestGenerateToolConfigYAML(t *testing.T) {
	cfg := server.ServerConfig{
		ToolConfigs: server.ToolConfigs{
			"tool1": MockToolConfig{
				Name:   "tool1",
				Type:   "custom-tool",
				Source: "src1",
				Other:  "foo",
			},
			"toolNoSource": MockToolConfig{
				Name: "toolNoSource",
				Type: "http",
			},
			"toolWithParams": MockToolConfig{
				Name: "toolWithParams",
				Type: "custom-tool",
				Parameters: []parameters.Parameter{
					parameters.NewStringParameter("param1", "desc1"),
				},
			},
			// References a source that is absent from SourceConfigs below.
			"toolWithMissingSource": MockToolConfig{
				Name:   "toolWithMissingSource",
				Type:   "custom-tool",
				Source: "missing-src",
			},
		},
		SourceConfigs: server.SourceConfigs{
			"src1": MockSourceConfig{
				Name:             "src1",
				Type:             "postgres",
				ConnectionString: "conn1",
			},
		},
	}

	tests := []struct {
		name         string
		toolName     string
		wantContains []string
		wantErr      bool
		wantNil      bool
	}{
		{
			name:     "tool with source",
			toolName: "tool1",
			wantContains: []string{
				"kind: tools",
				"name: tool1",
				"type: custom-tool",
				"source: src1",
				"other: foo",
				"---",
				"kind: sources",
				"name: src1",
				"type: postgres",
				"connection_string: conn1",
			},
		},
		{
			name:     "tool without source",
			toolName: "toolNoSource",
			wantContains: []string{
				"kind: tools",
				"name: toolNoSource",
				"type: http",
			},
		},
		{
			name:     "tool with parameters",
			toolName: "toolWithParams",
			wantContains: []string{
				"kind: tools",
				"name: toolWithParams",
				"type: custom-tool",
				"parameters:",
				"- name: param1",
				"type: string",
				"description: desc1",
			},
		},
		{
			name:     "non-existent tool",
			toolName: "missing-tool",
			wantErr:  true,
		},
		{
			name:     "tool with missing source config",
			toolName: "toolWithMissingSource",
			wantErr:  true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotBytes, err := generateToolConfigYAML(cfg, tt.toolName)
			if (err != nil) != tt.wantErr {
				t.Errorf("generateToolConfigYAML() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			if tt.wantNil {
				if gotBytes != nil {
					t.Errorf("generateToolConfigYAML() expected nil, got %s", string(gotBytes))
				}
				return
			}

			got := string(gotBytes)
			for _, want := range tt.wantContains {
				if !strings.Contains(got, want) {
					t.Errorf("generateToolConfigYAML() result missing expected string: %q\nGot:\n%s", want, got)
				}
			}
		})
	}
}
|
||||
349
cmd/internal/tools_file.go
Normal file
349
cmd/internal/tools_file.go
Normal file
@@ -0,0 +1,349 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package internal
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
)
|
||||
|
||||
// ToolsFile groups every resource section of a tools configuration file,
// keyed by resource name. It is populated by parseToolsFile via
// server.UnmarshalResourceConfig and combined across files by
// mergeToolsFiles.
type ToolsFile struct {
	Sources         server.SourceConfigs         `yaml:"sources"`
	AuthServices    server.AuthServiceConfigs    `yaml:"authServices"`
	EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
	Tools           server.ToolConfigs           `yaml:"tools"`
	Toolsets        server.ToolsetConfigs        `yaml:"toolsets"`
	Prompts         server.PromptConfigs         `yaml:"prompts"`
}
|
||||
|
||||
// parseEnv replaces environment variables ${ENV_NAME} with their values.
|
||||
// also support ${ENV_NAME:default_value}.
|
||||
func parseEnv(input string) (string, error) {
|
||||
re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)
|
||||
|
||||
var err error
|
||||
output := re.ReplaceAllStringFunc(input, func(match string) string {
|
||||
parts := re.FindStringSubmatch(match)
|
||||
|
||||
// extract the variable name
|
||||
variableName := parts[1]
|
||||
if value, found := os.LookupEnv(variableName); found {
|
||||
return value
|
||||
}
|
||||
if len(parts) >= 4 && parts[2] != "" {
|
||||
return parts[3]
|
||||
}
|
||||
err = fmt.Errorf("environment variable not found: %q", variableName)
|
||||
return ""
|
||||
})
|
||||
return output, err
|
||||
}
|
||||
|
||||
// parseToolsFile parses the provided yaml into appropriate configs.
|
||||
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
|
||||
var toolsFile ToolsFile
|
||||
// Replace environment variables if found
|
||||
output, err := parseEnv(string(raw))
|
||||
if err != nil {
|
||||
return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
|
||||
}
|
||||
raw = []byte(output)
|
||||
|
||||
raw, err = convertToolsFile(raw)
|
||||
if err != nil {
|
||||
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
|
||||
}
|
||||
|
||||
// Parse contents
|
||||
toolsFile.Sources, toolsFile.AuthServices, toolsFile.EmbeddingModels, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
|
||||
if err != nil {
|
||||
return toolsFile, err
|
||||
}
|
||||
return toolsFile, nil
|
||||
}
|
||||
|
||||
// convertToolsFile upgrades a tools file from the v1 layout (top-level
// "sources"/"tools"/... maps) to the v2 layout (one YAML document per
// resource, each carrying "kind" and "name" keys), returning the re-encoded
// bytes. Documents that already look like v2 are passed through unchanged.
func convertToolsFile(raw []byte) ([]byte, error) {
	var input yaml.MapSlice
	// UseOrderedMap decodes mappings into yaml.MapSlice so the original key
	// order survives the round trip.
	decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())

	// convert to tools file v2
	var buf bytes.Buffer
	encoder := yaml.NewEncoder(&buf)

	v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
	// Process every YAML document in the stream.
	for {
		if err := decoder.Decode(&input); err != nil {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		for _, item := range input {
			key, ok := item.Key.(string)
			if !ok {
				return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
			}
			// check if the key is config file v1's key
			if slices.Contains(v1keys, key) {
				// check if value conversion to yaml.MapSlice successfully
				// fields such as "tools" in toolsets might pass the first check but
				// fail to convert to MapSlice
				if slice, ok := item.Value.(yaml.MapSlice); ok {
					// Deprecated: convert authSources to authServices
					if key == "authSources" {
						key = "authServices"
					}
					transformed, err := transformDocs(key, slice)
					if err != nil {
						return nil, err
					}
					// encode per-doc
					for _, doc := range transformed {
						if err := encoder.Encode(doc); err != nil {
							return nil, err
						}
					}
				} else {
					// invalid input will be ignored
					// we don't want to throw error here since the config could
					// be valid but with a different order such as:
					// ---
					// tools:
					//   - tool_a
					// kind: toolsets
					// ---
					continue
				}
			} else {
				// this doc is already v2, encode to buf
				// NOTE: the whole document is emitted once and the remaining
				// keys are skipped via break, so a v2 document passes through
				// as a single unit rather than key by key.
				if err := encoder.Encode(input); err != nil {
					return nil, err
				}
				break
			}
		}
	}
	return buf.Bytes(), nil
}
|
||||
|
||||
// transformDocs transforms the configuration file from v1 format to v2
|
||||
// yaml.MapSlice will preserve the order in a map
|
||||
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
|
||||
var transformed []yaml.MapSlice
|
||||
for _, entry := range input {
|
||||
entryName, ok := entry.Key.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
|
||||
}
|
||||
entryBody := ProcessValue(entry.Value, kind == "toolsets")
|
||||
|
||||
currentTransformed := yaml.MapSlice{
|
||||
{Key: "kind", Value: kind},
|
||||
{Key: "name", Value: entryName},
|
||||
}
|
||||
|
||||
// Merge the transformed body into our result
|
||||
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
|
||||
currentTransformed = append(currentTransformed, bodySlice...)
|
||||
} else {
|
||||
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
|
||||
}
|
||||
transformed = append(transformed, currentTransformed)
|
||||
}
|
||||
return transformed, nil
|
||||
}
|
||||
|
||||
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
|
||||
func ProcessValue(v any, isToolset bool) any {
|
||||
switch val := v.(type) {
|
||||
case yaml.MapSlice:
|
||||
// creating a new MapSlice is safer for recursive transformation
|
||||
newVal := make(yaml.MapSlice, len(val))
|
||||
for i, item := range val {
|
||||
// Perform renaming
|
||||
if item.Key == "kind" {
|
||||
item.Key = "type"
|
||||
}
|
||||
// Recursive call for nested values (e.g., nested objects or lists)
|
||||
item.Value = ProcessValue(item.Value, false)
|
||||
newVal[i] = item
|
||||
}
|
||||
return newVal
|
||||
case []any:
|
||||
// Process lists: If it's a toolset top-level list, wrap it.
|
||||
if isToolset {
|
||||
return yaml.MapSlice{{Key: "tools", Value: val}}
|
||||
}
|
||||
// Otherwise, recurse into list items (to catch nested objects)
|
||||
newVal := make([]any, len(val))
|
||||
for i := range val {
|
||||
newVal[i] = ProcessValue(val[i], false)
|
||||
}
|
||||
return newVal
|
||||
default:
|
||||
return val
|
||||
}
|
||||
}
|
||||
|
||||
// mergeToolsFiles merges multiple ToolsFile structs into one.
|
||||
// Detects and raises errors for resource conflicts in sources, authServices, tools, and toolsets.
|
||||
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
|
||||
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
|
||||
merged := ToolsFile{
|
||||
Sources: make(server.SourceConfigs),
|
||||
AuthServices: make(server.AuthServiceConfigs),
|
||||
EmbeddingModels: make(server.EmbeddingModelConfigs),
|
||||
Tools: make(server.ToolConfigs),
|
||||
Toolsets: make(server.ToolsetConfigs),
|
||||
Prompts: make(server.PromptConfigs),
|
||||
}
|
||||
|
||||
var conflicts []string
|
||||
|
||||
for fileIndex, file := range files {
|
||||
// Check for conflicts and merge sources
|
||||
for name, source := range file.Sources {
|
||||
if _, exists := merged.Sources[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Sources[name] = source
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge authServices
|
||||
for name, authService := range file.AuthServices {
|
||||
if _, exists := merged.AuthServices[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.AuthServices[name] = authService
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge embeddingModels
|
||||
for name, em := range file.EmbeddingModels {
|
||||
if _, exists := merged.EmbeddingModels[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.EmbeddingModels[name] = em
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge tools
|
||||
for name, tool := range file.Tools {
|
||||
if _, exists := merged.Tools[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Tools[name] = tool
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge toolsets
|
||||
for name, toolset := range file.Toolsets {
|
||||
if _, exists := merged.Toolsets[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Toolsets[name] = toolset
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts and merge prompts
|
||||
for name, prompt := range file.Prompts {
|
||||
if _, exists := merged.Prompts[name]; exists {
|
||||
conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1))
|
||||
} else {
|
||||
merged.Prompts[name] = prompt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If conflicts were detected, return an error
|
||||
if len(conflicts) > 0 {
|
||||
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
|
||||
}
|
||||
|
||||
return merged, nil
|
||||
}
|
||||
|
||||
// LoadAndMergeToolsFiles loads multiple YAML files and merges them
|
||||
func LoadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) {
|
||||
var toolsFiles []ToolsFile
|
||||
|
||||
for _, filePath := range filePaths {
|
||||
buf, err := os.ReadFile(filePath)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err)
|
||||
}
|
||||
|
||||
toolsFile, err := parseToolsFile(ctx, buf)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err)
|
||||
}
|
||||
|
||||
toolsFiles = append(toolsFiles, toolsFile)
|
||||
}
|
||||
|
||||
mergedFile, err := mergeToolsFiles(toolsFiles...)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err)
|
||||
}
|
||||
|
||||
return mergedFile, nil
|
||||
}
|
||||
|
||||
// LoadAndMergeToolsFolder loads all YAML files from a directory and merges them
|
||||
func LoadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) {
|
||||
// Check if directory exists
|
||||
info, err := os.Stat(folderPath)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err)
|
||||
}
|
||||
if !info.IsDir() {
|
||||
return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath)
|
||||
}
|
||||
|
||||
// Find all YAML files in the directory
|
||||
pattern := filepath.Join(folderPath, "*.yaml")
|
||||
yamlFiles, err := filepath.Glob(pattern)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
|
||||
}
|
||||
|
||||
// Also find .yml files
|
||||
ymlPattern := filepath.Join(folderPath, "*.yml")
|
||||
ymlFiles, err := filepath.Glob(ymlPattern)
|
||||
if err != nil {
|
||||
return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
|
||||
}
|
||||
|
||||
// Combine both file lists
|
||||
allFiles := append(yamlFiles, ymlFiles...)
|
||||
|
||||
if len(allFiles) == 0 {
|
||||
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
|
||||
}
|
||||
|
||||
// Use existing LoadAndMergeToolsFiles function
|
||||
return LoadAndMergeToolsFiles(ctx, allFiles)
|
||||
}
|
||||
2141
cmd/internal/tools_file_test.go
Normal file
2141
cmd/internal/tools_file_test.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,30 +0,0 @@
|
||||
// Copyright 2024 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Option is a function that configures a Command.
|
||||
type Option func(*Command)
|
||||
|
||||
// WithStreams overrides the default writer.
|
||||
func WithStreams(out, err io.Writer) Option {
|
||||
return func(c *Command) {
|
||||
c.outStream = out
|
||||
c.errStream = err
|
||||
}
|
||||
}
|
||||
884
cmd/root.go
884
cmd/root.go
File diff suppressed because it is too large
Load Diff
1747
cmd/root_test.go
1747
cmd/root_test.go
File diff suppressed because it is too large
Load Diff
@@ -1 +1 @@
|
||||
0.26.0
|
||||
0.27.0
|
||||
|
||||
@@ -234,7 +234,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.26.0\" # x-release-please-version\n",
|
||||
"version = \"0.27.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -109,7 +109,7 @@ To install Toolbox as a binary on Linux (AMD64):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.26.0
|
||||
export VERSION=0.27.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -120,7 +120,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.26.0
|
||||
export VERSION=0.27.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -131,7 +131,7 @@ To install Toolbox as a binary on macOS (Intel):
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.26.0
|
||||
export VERSION=0.27.0
|
||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -142,7 +142,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
|
||||
|
||||
```cmd
|
||||
:: see releases page for other versions
|
||||
set VERSION=0.26.0
|
||||
set VERSION=0.27.0
|
||||
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||
```
|
||||
|
||||
@@ -152,7 +152,7 @@ To install Toolbox as a binary on Windows (PowerShell):
|
||||
|
||||
```powershell
|
||||
# see releases page for other versions
|
||||
$VERSION = "0.26.0"
|
||||
$VERSION = "0.27.0"
|
||||
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||
```
|
||||
|
||||
@@ -164,7 +164,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.26.0
|
||||
export VERSION=0.27.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -183,7 +183,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.26.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.27.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
@@ -115,7 +115,7 @@ pip install google-genai
|
||||
|
||||
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
|
||||
```py
|
||||
{{< include "quickstart/python/adk/quickstart.py" >}}
|
||||
{{< regionInclude "quickstart/python/adk/quickstart.py" "quickstart" >}}
|
||||
```
|
||||
<br/>
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
||||
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"arrify": "^2.0.0",
|
||||
"extend": "^3.0.2"
|
||||
@@ -31,6 +32,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
|
||||
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
@@ -40,15 +42,17 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
|
||||
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@google-cloud/storage": {
|
||||
"version": "7.18.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
|
||||
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
|
||||
"version": "7.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.19.0.tgz",
|
||||
"integrity": "sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@google-cloud/paginator": "^5.0.0",
|
||||
"@google-cloud/projectify": "^4.0.0",
|
||||
@@ -56,7 +60,7 @@
|
||||
"abort-controller": "^3.0.0",
|
||||
"async-retry": "^1.3.3",
|
||||
"duplexify": "^4.1.3",
|
||||
"fast-xml-parser": "^4.4.1",
|
||||
"fast-xml-parser": "^5.3.4",
|
||||
"gaxios": "^6.0.2",
|
||||
"google-auth-library": "^9.6.3",
|
||||
"html-entities": "^2.5.2",
|
||||
@@ -75,6 +79,7 @@
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
@@ -97,7 +102,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
|
||||
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"google-auth-library": "^9.14.2",
|
||||
"ws": "^8.18.0"
|
||||
@@ -136,7 +140,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
|
||||
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ajv": "^6.12.6",
|
||||
"content-type": "^1.0.5",
|
||||
@@ -299,6 +302,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
|
||||
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
@@ -307,13 +311,15 @@
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
|
||||
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "24.10.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
|
||||
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~7.16.0"
|
||||
}
|
||||
@@ -323,6 +329,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
|
||||
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/caseless": "*",
|
||||
"@types/node": "*",
|
||||
@@ -335,6 +342,7 @@
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
@@ -352,6 +360,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
@@ -361,6 +370,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
@@ -372,13 +382,15 @@
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
||||
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"event-target-shim": "^5.0.0"
|
||||
},
|
||||
@@ -453,6 +465,7 @@
|
||||
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
|
||||
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
@@ -462,6 +475,7 @@
|
||||
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
|
||||
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"retry": "0.13.1"
|
||||
}
|
||||
@@ -754,6 +768,7 @@
|
||||
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
|
||||
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"end-of-stream": "^1.4.1",
|
||||
"inherits": "^2.0.3",
|
||||
@@ -802,6 +817,7 @@
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
|
||||
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
@@ -871,6 +887,7 @@
|
||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
@@ -901,7 +918,6 @@
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
|
||||
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"accepts": "^2.0.0",
|
||||
"body-parser": "^2.2.0",
|
||||
@@ -973,9 +989,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fast-xml-parser": {
|
||||
"version": "4.5.3",
|
||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
|
||||
"integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
|
||||
"version": "5.3.5",
|
||||
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.5.tgz",
|
||||
"integrity": "sha512-JeaA2Vm9ffQKp9VjvfzObuMCjUYAp5WDYhRYL5LrBPY/jUDlUtOvDfot0vKSkB9tuX885BDHjtw4fZadD95wnA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
@@ -983,8 +999,9 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"strnum": "^1.1.1"
|
||||
"strnum": "^2.1.2"
|
||||
},
|
||||
"bin": {
|
||||
"fxparser": "src/cli/cli.js"
|
||||
@@ -1333,7 +1350,8 @@
|
||||
"url": "https://patreon.com/mdevils"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/http-errors": {
|
||||
"version": "2.0.0",
|
||||
@@ -1365,6 +1383,7 @@
|
||||
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
|
||||
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@tootallnate/once": "2",
|
||||
"agent-base": "6",
|
||||
@@ -1379,6 +1398,7 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -1555,6 +1575,7 @@
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
|
||||
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
@@ -1715,6 +1736,7 @@
|
||||
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
|
||||
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"yocto-queue": "^0.1.0"
|
||||
},
|
||||
@@ -1856,6 +1878,7 @@
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
@@ -1870,6 +1893,7 @@
|
||||
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
|
||||
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
}
|
||||
@@ -1879,6 +1903,7 @@
|
||||
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
|
||||
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/request": "^2.48.8",
|
||||
"extend": "^3.0.2",
|
||||
@@ -2107,6 +2132,7 @@
|
||||
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
|
||||
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"stubs": "^3.0.0"
|
||||
}
|
||||
@@ -2115,13 +2141,15 @@
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
|
||||
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
@@ -2223,28 +2251,31 @@
|
||||
}
|
||||
},
|
||||
"node_modules/strnum": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
|
||||
"integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz",
|
||||
"integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/stubs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
|
||||
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/teeny-request": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
|
||||
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"http-proxy-agent": "^5.0.0",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
@@ -2261,6 +2292,7 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -2273,6 +2305,7 @@
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
@@ -2314,7 +2347,8 @@
|
||||
"version": "7.16.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
||||
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/unpipe": {
|
||||
"version": "1.0.0",
|
||||
@@ -2338,7 +2372,8 @@
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||
"license": "MIT"
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "9.0.1",
|
||||
@@ -2525,6 +2560,7 @@
|
||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -2537,7 +2573,6 @@
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -3351,13 +3351,13 @@
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"node_modules/axios": {
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
|
||||
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
|
||||
"version": "1.13.5",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz",
|
||||
"integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.15.6",
|
||||
"form-data": "^4.0.4",
|
||||
"follow-redirects": "^1.15.11",
|
||||
"form-data": "^4.0.5",
|
||||
"proxy-from-env": "^1.1.0"
|
||||
}
|
||||
},
|
||||
@@ -4248,9 +4248,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
|
||||
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
|
||||
@@ -18,7 +18,8 @@
|
||||
"node_modules/@cfworker/json-schema": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
|
||||
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@google/generative-ai": {
|
||||
"version": "0.24.1",
|
||||
@@ -225,6 +226,7 @@
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
|
||||
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -308,6 +310,7 @@
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
||||
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -420,6 +423,7 @@
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
||||
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
@@ -821,6 +825,7 @@
|
||||
"version": "1.0.21",
|
||||
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
|
||||
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"base64-js": "^1.5.1"
|
||||
}
|
||||
@@ -873,9 +878,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/langsmith": {
|
||||
"version": "0.4.3",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
|
||||
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.5.2.tgz",
|
||||
"integrity": "sha512-CfkcQsiajtTWknAcyItvJsKEQdY2VgDpm6U8pRI9wnM07mevnOv5EF+RcqWGwx37SEUxtyi2RXMwnKW8b06JtA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/uuid": "^10.0.0",
|
||||
@@ -969,6 +974,7 @@
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
||||
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mustache": "bin/mustache"
|
||||
}
|
||||
@@ -1407,7 +1413,6 @@
|
||||
"version": "3.25.76",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,21 @@
|
||||
# [START quickstart]
|
||||
import asyncio
|
||||
|
||||
from google.adk import Agent
|
||||
from google.adk.apps import App
|
||||
from google.adk.runners import InMemoryRunner
|
||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
||||
from google.genai.types import Content, Part
|
||||
|
||||
prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
||||
toolset = ToolboxToolset(
|
||||
@@ -8,10 +23,35 @@ toolset = ToolboxToolset(
|
||||
)
|
||||
|
||||
root_agent = Agent(
|
||||
name='root_agent',
|
||||
name='hotel_assistant',
|
||||
model='gemini-2.5-flash',
|
||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||
instruction=prompt,
|
||||
tools=[toolset],
|
||||
)
|
||||
|
||||
app = App(root_agent=root_agent, name="my_agent")
|
||||
# [END quickstart]
|
||||
|
||||
queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
]
|
||||
|
||||
async def main():
|
||||
runner = InMemoryRunner(app=app)
|
||||
session = await runner.session_service.create_session(
|
||||
app_name=app.name, user_id="test_user"
|
||||
)
|
||||
|
||||
for query in queries:
|
||||
print(f"\nUser: {query}")
|
||||
user_message = Content(parts=[Part.from_text(text=query)])
|
||||
|
||||
async for event in runner.run_async(user_id="test_user", session_id=session.id, new_message=user_message):
|
||||
if event.is_final_response() and event.content and event.content.parts:
|
||||
print(f"Agent: {event.content.parts[0].text}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
|
||||
@@ -41,31 +41,29 @@ def golden_keywords():
|
||||
class TestExecution:
|
||||
"""Test framework execution and output validation."""
|
||||
|
||||
_cached_output = None
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def script_output(self, capsys):
|
||||
"""Run the quickstart function and return its output."""
|
||||
|
||||
# TODO: Add better validation for ADK once we have a way to capture its
|
||||
# output.
|
||||
if ORCH_NAME == "adk":
|
||||
return quickstart.app.root_agent.name
|
||||
else:
|
||||
if TestExecution._cached_output is None:
|
||||
asyncio.run(quickstart.main())
|
||||
|
||||
return capsys.readouterr()
|
||||
out, err = capsys.readouterr()
|
||||
TestExecution._cached_output = (out, err)
|
||||
|
||||
class Output:
|
||||
def __init__(self, out, err):
|
||||
self.out = out
|
||||
self.err = err
|
||||
|
||||
return Output(*TestExecution._cached_output)
|
||||
|
||||
def test_script_runs_without_errors(self, script_output):
|
||||
"""Test that the script runs and produces no stderr."""
|
||||
if ORCH_NAME == "adk":
|
||||
return
|
||||
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
||||
|
||||
def test_keywords_in_output(self, script_output, golden_keywords):
|
||||
"""Test that expected keywords are present in the script's output."""
|
||||
|
||||
if ORCH_NAME == "adk":
|
||||
assert script_output == "root_agent"
|
||||
return
|
||||
output = script_output.out
|
||||
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
||||
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
||||
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -45,19 +45,19 @@ instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
335
docs/en/how-to/connect-ide/oracle_mcp.md
Normal file
335
docs/en/how-to/connect-ide/oracle_mcp.md
Normal file
@@ -0,0 +1,335 @@
|
||||
---
|
||||
title: "Oracle using MCP"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
Connect your IDE to Oracle using Toolbox.
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
|
||||
an open protocol for connecting Large Language Models (LLMs) to data sources
|
||||
like Oracle. This guide covers how to use [MCP Toolbox for Databases][toolbox]
|
||||
to expose your developer assistant tools to an Oracle instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. Create or select an Oracle instance.
|
||||
|
||||
1. Create or reuse a database user and have the username and password ready.
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
|
||||
to your OS and CPU architecture. You are required to use Toolbox version
|
||||
V0.26.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude
|
||||
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": "",
|
||||
"ORACLE_WALLET_LOCATION": "",
|
||||
"ORACLE_USE_OCI": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open Claude desktop and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": "",
|
||||
"ORACLE_WALLET_LOCATION": "",
|
||||
"ORACLE_USE_OCI": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
|
||||
new MCP server available.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the Cline extension in VS Code and tap
|
||||
the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": "",
|
||||
"ORACLE_WALLET_LOCATION": "",
|
||||
"ORACLE_USE_OCI": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully
|
||||
connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": "",
|
||||
"ORACLE_WALLET_LOCATION": "",
|
||||
"ORACLE_USE_OCI": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Cursor and navigate to **Settings > Cursor
|
||||
Settings > MCP**. You should see a green active status after the server is
|
||||
successfully connected.
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open VS Code and
|
||||
create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracle","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open Windsurf and navigate to the
|
||||
Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your
|
||||
values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": "",
|
||||
"ORACLE_WALLET": "",
|
||||
"ORACLE_WALLET_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the Gemini CLI.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the Gemini Code Assist extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"oracle": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","oracledb","--stdio"],
|
||||
"env": {
|
||||
"ORACLE_HOST": "",
|
||||
"ORACLE_PORT": "1521",
|
||||
"ORACLE_SERVICE": "",
|
||||
"ORACLE_USER": "",
|
||||
"ORACLE_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Oracle using MCP. Try asking your AI
|
||||
assistant to list tables, create a table, or define and execute other SQL
|
||||
statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **list_tables**: lists tables and descriptions
|
||||
1. **execute_sql**: execute any SQL statement
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -32,7 +32,7 @@ to expose your developer assistant tools to a Postgres instance:
|
||||
|
||||
{{< notice tip >}}
|
||||
This guide can be used with [AlloyDB
|
||||
Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
Omni](https://cloud.google.com/alloydb/omni/docs/overview).
|
||||
{{< /notice >}}
|
||||
|
||||
## Set up the database
|
||||
@@ -40,10 +40,10 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
1. Create or select a PostgreSQL instance.
|
||||
|
||||
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
|
||||
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
|
||||
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart)
|
||||
|
||||
1. Create or reuse [a database
|
||||
user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
|
||||
user](https://docs.cloud.google.com/alloydb/omni/containers/current/docs/database-users/manage-users)
|
||||
and have the username and password ready.
|
||||
|
||||
## Install MCP Toolbox
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -83,15 +83,12 @@ Toolbox instead of the local address.
|
||||
|
||||
2. Open your agent file (`my_agent/agent.py`).
|
||||
|
||||
3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
|
||||
3. Update the `ToolboxToolset` initialization to point to your Cloud Run service URL. Replace the existing initialization code with the following:
|
||||
|
||||
{{% alert color="info" %}}
|
||||
Since Cloud Run services are secured by default, you also need to provide an
|
||||
authentication token.
|
||||
{{% alert color="info" title="Note" %}}
|
||||
Since Cloud Run services are secured by default, you also need to provide a workload identity.
|
||||
{{% /alert %}}
|
||||
|
||||
Replace your existing client initialization code with the following:
|
||||
|
||||
```python
|
||||
from google.adk import Agent
|
||||
from google.adk.apps import App
|
||||
@@ -132,14 +129,14 @@ app = App(root_agent=root_agent, name="my_agent")
|
||||
Run the deployment command:
|
||||
|
||||
```bash
|
||||
make backend
|
||||
make deploy
|
||||
```
|
||||
|
||||
This command will build your agent's container image and deploy it to Vertex AI.
|
||||
|
||||
## Step 6: Test your Deployment
|
||||
|
||||
Once the deployment command (`make backend`) completes, it will output the URL
|
||||
Once the deployment command (`make deploy`) completes, it will output the URL
|
||||
for the Agent Engine Playground. You can click on this URL to open the
|
||||
Playground in your browser and start chatting with your agent to test the tools.
|
||||
|
||||
|
||||
112
docs/en/how-to/generate_skill.md
Normal file
112
docs/en/how-to/generate_skill.md
Normal file
@@ -0,0 +1,112 @@
|
||||
---
|
||||
title: "Generate Agent Skills"
|
||||
type: docs
|
||||
weight: 10
|
||||
description: >
|
||||
How to generate agent skills from a toolset.
|
||||
---
|
||||
|
||||
The `skills-generate` command allows you to convert a **toolset** into an **Agent Skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset, complying with the [Agent Skill specification](https://agentskills.io/specification).
|
||||
|
||||
## Before you begin
|
||||
|
||||
1. Make sure you have the `toolbox` executable in your PATH.
|
||||
2. Make sure you have [Node.js](https://nodejs.org/) installed on your system.
|
||||
|
||||
## Generating a Skill from a Toolset
|
||||
|
||||
A skill package consists of a `SKILL.md` file (with required YAML frontmatter) and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding Node.js script (`.js`) that works across different platforms (Linux, macOS, Windows).
|
||||
|
||||
|
||||
### Command Usage
|
||||
|
||||
The basic syntax for the command is:
|
||||
|
||||
```bash
|
||||
toolbox <tool-source> skills-generate \
|
||||
--name <skill-name> \
|
||||
--toolset <toolset-name> \
|
||||
--description <description> \
|
||||
--output-dir <output-directory>
|
||||
```
|
||||
|
||||
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
|
||||
- `--name`: Name of the generated skill.
|
||||
- `--description`: Description of the generated skill.
|
||||
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
|
||||
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
|
||||
|
||||
{{< notice note >}}
|
||||
**Note:** The `<skill-name>` must follow the Agent Skill [naming convention](https://agentskills.io/specification): it must contain only lowercase alphanumeric characters and hyphens, cannot start or end with a hyphen, and cannot contain consecutive hyphens (e.g., `my-skill`, `data-processing`).
|
||||
{{< /notice >}}
|
||||
|
||||
### Example: Custom Tools File
|
||||
|
||||
1. Create a `tools.yaml` file with a toolset and some tools:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
tool_a:
|
||||
description: "First tool"
|
||||
run:
|
||||
command: "echo 'Tool A'"
|
||||
tool_b:
|
||||
description: "Second tool"
|
||||
run:
|
||||
command: "echo 'Tool B'"
|
||||
toolsets:
|
||||
my_toolset:
|
||||
tools:
|
||||
- tool_a
|
||||
- tool_b
|
||||
```
|
||||
|
||||
2. Generate the skill:
|
||||
|
||||
```bash
|
||||
toolbox --tools-file tools.yaml skills-generate \
|
||||
--name "my-skill" \
|
||||
--toolset "my_toolset" \
|
||||
--description "A skill containing multiple tools" \
|
||||
--output-dir "generated-skills"
|
||||
```
|
||||
|
||||
3. The generated skill directory structure:
|
||||
|
||||
```text
|
||||
generated-skills/
|
||||
└── my-skill/
|
||||
├── SKILL.md
|
||||
├── assets/
|
||||
│ ├── tool_a.yaml
|
||||
│ └── tool_b.yaml
|
||||
└── scripts/
|
||||
├── tool_a.js
|
||||
└── tool_b.js
|
||||
```
|
||||
|
||||
In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset.
|
||||
|
||||
### Example: Prebuilt Configuration
|
||||
|
||||
You can also generate skills from prebuilt toolsets:
|
||||
|
||||
```bash
|
||||
toolbox --prebuilt alloydb-postgres-admin skills-generate \
|
||||
--name "alloydb-postgres-admin" \
|
||||
--description "skill for performing administrative operations on alloydb"
|
||||
```
|
||||
|
||||
## Installing the Generated Skill in Gemini CLI
|
||||
|
||||
Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command.
|
||||
|
||||
### Installation Command
|
||||
|
||||
Provide the path to the directory containing the generated skill:
|
||||
|
||||
```bash
|
||||
gemini skills install /path/to/generated-skills/my-skill
|
||||
```
|
||||
|
||||
Alternatively, use `~/.gemini/skills` as the `--output-dir` to generate the skill directly into the Gemini CLI's skills directory.
|
||||
@@ -20,14 +20,15 @@ The `invoke` command allows you to invoke tools defined in your configuration di
|
||||
1. Make sure you have the `toolbox` binary installed or built.
|
||||
2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`).
|
||||
|
||||
## Basic Usage
|
||||
### Command Usage
|
||||
|
||||
The basic syntax for the command is:
|
||||
|
||||
```bash
|
||||
toolbox [--tools-file <path> | --prebuilt <name>] invoke <tool-name> [params]
|
||||
toolbox <tool-source> invoke <tool-name> [params]
|
||||
```
|
||||
|
||||
- `<tool-source>`: Can be `--tools-file`, `--tools-files`, `--tools-folder`, and `--prebuilt`. See the [CLI Reference](../reference/cli.md) for details.
|
||||
- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`.
|
||||
- `[params]`: (Optional) A JSON string representing the arguments for the tool.
|
||||
|
||||
|
||||
@@ -32,7 +32,8 @@ description: >
|
||||
|
||||
## Sub Commands
|
||||
|
||||
### `invoke`
|
||||
<details>
|
||||
<summary><code>invoke</code></summary>
|
||||
|
||||
Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.
|
||||
|
||||
@@ -42,8 +43,36 @@ Executes a tool directly with the provided parameters. This is useful for testin
|
||||
toolbox invoke <tool-name> [params]
|
||||
```
|
||||
|
||||
- `<tool-name>`: The name of the tool to execute (as defined in your configuration).
|
||||
- `[params]`: (Optional) A JSON string containing the parameters for the tool.
|
||||
**Arguments:**
|
||||
|
||||
- `tool-name`: The name of the tool to execute (as defined in your configuration).
|
||||
- `params`: (Optional) A JSON string containing the parameters for the tool.
|
||||
|
||||
For more detailed instructions, see [Invoke Tools via CLI](../how-to/invoke_tool.md).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><code>skills-generate</code></summary>
|
||||
|
||||
Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill.
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
toolbox skills-generate --name <name> --description <description> --toolset <toolset> --output-dir <output>
|
||||
```
|
||||
|
||||
**Flags:**
|
||||
|
||||
- `--name`: Name of the generated skill.
|
||||
- `--description`: Description of the generated skill.
|
||||
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If not provided, all tools will be included.
|
||||
- `--output-dir`: (Optional) Directory to output generated skills (default: "skills").
|
||||
|
||||
For more detailed instructions, see [Generate Agent Skills](../how-to/generate_skill.md).
|
||||
|
||||
</details>
|
||||
|
||||
## Examples
|
||||
|
||||
|
||||
@@ -488,6 +488,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_project_file`: Create a new LookML file.
|
||||
* `update_project_file`: Update an existing LookML file.
|
||||
* `delete_project_file`: Delete a LookML file.
|
||||
* `validate_project`: Check the syntax of a LookML project.
|
||||
* `get_connections`: Get the available connections in a Looker instance.
|
||||
* `get_connection_schemas`: Get the available schemas in a connection.
|
||||
* `get_connection_databases`: Get the available databases in a connection.
|
||||
@@ -691,6 +692,34 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `execute_cypher`: Executes a Cypher query.
|
||||
* `get_schema`: Retrieves the schema of the Neo4j database.
|
||||
|
||||
|
||||
## Oracle
|
||||
|
||||
* `--prebuilt` value: `oracle`
|
||||
* **Environment Variables:**
|
||||
* `ORACLE_HOST`: The hostname or IP address of the Oracle server.
|
||||
* `ORACLE_PORT`: The port number for the Oracle server (Default: 1521).
|
||||
* `ORACLE_CONNECTION_STRING`: The full connection string for the Oracle DB instance (e.g., `host:port/service_name`), used as an alternative to `ORACLE_HOST`/`ORACLE_PORT`.
|
||||
* `ORACLE_USER`: The username for the Oracle DB instance.
|
||||
* `ORACLE_PASSWORD`: The password for the Oracle DB instance.
|
||||
* `ORACLE_WALLET`: The path to Oracle DB Wallet file for the databases that support this authentication type
|
||||
* `ORACLE_USE_OCI`: `true` or `false`. Whether to use the OCI-based driver, which is required for Oracle Wallet and TNS alias support. Default is `false`.
|
||||
* **Permissions:**
|
||||
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
|
||||
* **Tools:**
|
||||
* `execute_sql`: Executes a SQL query.
|
||||
* `list_tables`: Lists tables in the database.
|
||||
* `list_active_sessions`: Lists active database sessions.
|
||||
* `get_query_plan`: Gets the execution plan for a SQL statement.
|
||||
* `list_top_sql_by_resource`: Lists top SQL statements by resource usage.
|
||||
* `list_tablespace_usage`: Lists tablespace usage.
|
||||
* `list_invalid_objects`: Lists invalid objects.
|
||||
|
||||
## Google Cloud Healthcare API
|
||||
* `--prebuilt` value: `cloud-healthcare`
|
||||
* **Environment Variables:**
|
||||
|
||||
@@ -194,6 +194,15 @@ Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Managed Connection Pooling
|
||||
|
||||
Toolbox automatically supports [Managed Connection Pooling][alloydb-mcp]. If your AlloyDB instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
|
||||
|
||||
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [AlloyDB Managed Connection Pooling documentation][alloydb-mcp-docs].
|
||||
|
||||
[alloydb-mcp]: https://cloud.google.com/blog/products/databases/alloydb-managed-connection-pooling
|
||||
[alloydb-mcp-docs]: https://cloud.google.com/alloydb/docs/configure-managed-connection-pooling
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|
||||
@@ -195,6 +195,15 @@ Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Managed Connection Pooling
|
||||
|
||||
Toolbox automatically supports [Managed Connection Pooling][csql-mcp]. If your Cloud SQL for PostgreSQL instance has Managed Connection Pooling enabled, the connection will immediately benefit from increased throughput and reduced latency.
|
||||
|
||||
The interface is identical, so there's no additional configuration required on the client. For more information on configuring your instance, see the [Cloud SQL Managed Connection Pooling documentation][csql-mcp-docs].
|
||||
|
||||
[csql-mcp]: https://docs.cloud.google.com/sql/docs/postgres/managed-connection-pooling
|
||||
[csql-mcp-docs]: https://docs.cloud.google.com/sql/docs/postgres/configure-mcp
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|
||||
242
docs/en/resources/sources/cockroachdb.md
Normal file
242
docs/en/resources/sources/cockroachdb.md
Normal file
@@ -0,0 +1,242 @@
|
||||
---
|
||||
title: "CockroachDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
CockroachDB is a distributed SQL database built for cloud applications.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
[CockroachDB][crdb-docs] is a distributed SQL database designed for cloud-native applications. It provides strong consistency, horizontal scalability, and built-in resilience with automatic failover and recovery. CockroachDB uses the PostgreSQL wire protocol, making it compatible with many PostgreSQL tools and drivers while providing unique features like multi-region deployments and distributed transactions.
|
||||
|
||||
**Minimum Version:** CockroachDB v25.1 or later is recommended for full tool compatibility.
|
||||
|
||||
[crdb-docs]: https://www.cockroachlabs.com/docs/
|
||||
|
||||
## Available Tools
|
||||
|
||||
- [`cockroachdb-sql`](../tools/cockroachdb/cockroachdb-sql.md)
|
||||
Execute predefined, parameterized SQL statements in CockroachDB (for production use with queries defined in your configuration).
|
||||
|
||||
- [`cockroachdb-execute-sql`](../tools/cockroachdb/cockroachdb-execute-sql.md)
|
||||
Run parameterized SQL statements in CockroachDB.
|
||||
|
||||
- [`cockroachdb-list-schemas`](../tools/cockroachdb/cockroachdb-list-schemas.md)
|
||||
List schemas in a CockroachDB database.
|
||||
|
||||
- [`cockroachdb-list-tables`](../tools/cockroachdb/cockroachdb-list-tables.md)
|
||||
List tables in a CockroachDB database.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Database User
|
||||
|
||||
This source uses standard authentication. You will need to [create a CockroachDB user][crdb-users] to login to the database with. For CockroachDB Cloud deployments, SSL/TLS is required.
|
||||
|
||||
[crdb-users]: https://www.cockroachlabs.com/docs/stable/create-user.html
|
||||
|
||||
### SSL/TLS Configuration
|
||||
|
||||
CockroachDB Cloud clusters require SSL/TLS connections. Use the `queryParams` section to configure SSL settings:
|
||||
|
||||
- **For CockroachDB Cloud**: Use `sslmode: require` at minimum
|
||||
- **For self-hosted with certificates**: Use `sslmode: verify-full` with certificate paths
|
||||
- **For local development only**: Use `sslmode: disable` (not recommended for production)
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
maxRetries: 5
|
||||
retryBaseDelay: 500ms
|
||||
queryParams:
|
||||
sslmode: require
|
||||
application_name: my-app
|
||||
|
||||
# MCP Security Settings (recommended for production)
|
||||
readOnlyMode: true # Read-only by default (MCP best practice)
|
||||
enableWriteMode: false # Set to true to allow write operations
|
||||
maxRowLimit: 1000 # Limit query results
|
||||
queryTimeoutSec: 30 # Prevent long-running queries
|
||||
enableTelemetry: true # Enable observability
|
||||
telemetryVerbose: false # Set true for detailed logs
|
||||
clusterID: "my-cluster" # Optional identifier
|
||||
|
||||
tools:
|
||||
list_expenses:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: List all expenses
|
||||
statement: SELECT id, description, amount, category FROM expenses WHERE user_id = $1
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's ID
|
||||
|
||||
describe_expenses:
|
||||
type: cockroachdb-describe-table
|
||||
source: my_cockroachdb
|
||||
description: Describe the expenses table schema
|
||||
|
||||
list_expenses_indexes:
|
||||
type: cockroachdb-list-indexes
|
||||
source: my_cockroachdb
|
||||
description: List indexes on the expenses table
|
||||
```
|
||||
|
||||
## Configuration Parameters
|
||||
|
||||
### Required Parameters
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb` |
|
||||
| `host` | string | The hostname or IP address of the CockroachDB cluster |
|
||||
| `port` | string | The port number (typically "26257") |
|
||||
| `user` | string | The database user name |
|
||||
| `database` | string | The database name to connect to |
|
||||
|
||||
### Optional Parameters
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `password` | string | "" | The database password (can be empty for certificate-based auth) |
|
||||
| `maxRetries` | integer | 5 | Maximum number of connection retry attempts |
|
||||
| `retryBaseDelay` | string | "500ms" | Base delay between retry attempts (exponential backoff) |
|
||||
| `queryParams` | map | {} | Additional connection parameters (e.g., SSL configuration) |
|
||||
|
||||
### MCP Security Parameters
|
||||
|
||||
CockroachDB integration includes security features following the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) specification:
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `readOnlyMode` | boolean | true | Enables read-only mode by default (MCP requirement) |
|
||||
| `enableWriteMode` | boolean | false | Explicitly enable write operations (INSERT/UPDATE/DELETE/CREATE/DROP) |
|
||||
| `maxRowLimit` | integer | 1000 | Maximum rows returned per SELECT query (auto-adds LIMIT clause) |
|
||||
| `queryTimeoutSec` | integer | 30 | Query timeout in seconds to prevent long-running queries |
|
||||
| `enableTelemetry` | boolean | true | Enable structured logging of tool invocations |
|
||||
| `telemetryVerbose` | boolean | false | Enable detailed JSON telemetry output |
|
||||
| `clusterID` | string | "" | Optional cluster identifier for telemetry |
|
||||
|
||||
### Query Parameters
|
||||
|
||||
Common query parameters for CockroachDB connections:
|
||||
|
||||
| Parameter | Values | Description |
|
||||
|-----------|--------|-------------|
|
||||
| `sslmode` | `disable`, `require`, `verify-ca`, `verify-full` | SSL/TLS mode (CockroachDB Cloud requires `require` or higher) |
|
||||
| `sslrootcert` | file path | Path to root certificate for SSL verification |
|
||||
| `sslcert` | file path | Path to client certificate |
|
||||
| `sslkey` | file path | Path to client key |
|
||||
| `application_name` | string | Application name for connection tracking |
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Security and MCP Compliance
|
||||
|
||||
**Read-Only by Default**: The integration follows MCP best practices by defaulting to read-only mode. This prevents accidental data modifications:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
readOnlyMode: true # Default behavior
|
||||
enableWriteMode: false # Explicit write opt-in required
|
||||
```
|
||||
|
||||
To enable write operations:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
readOnlyMode: false # Disable read-only protection
|
||||
enableWriteMode: true # Explicitly allow writes
|
||||
```
|
||||
|
||||
**Query Limits**: Automatic row limits prevent excessive data retrieval:
|
||||
- SELECT queries automatically get `LIMIT 1000` appended (configurable via `maxRowLimit`)
|
||||
- Queries are terminated after 30 seconds (configurable via `queryTimeoutSec`)
|
||||
|
||||
**Observability**: Structured telemetry provides visibility into tool usage:
|
||||
- Tool invocations are logged with status, latency, and row counts
|
||||
- SQL queries are redacted to protect sensitive values
|
||||
- Set `telemetryVerbose: true` for detailed JSON logs
|
||||
|
||||
### Use UUID Primary Keys
|
||||
|
||||
CockroachDB performs best with UUID primary keys rather than sequential integers to avoid transaction hotspots:
|
||||
|
||||
```sql
|
||||
CREATE TABLE expenses (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
description TEXT,
|
||||
amount DECIMAL(10,2)
|
||||
);
|
||||
```
|
||||
|
||||
### Automatic Transaction Retry
|
||||
|
||||
This source uses the official `cockroach-go/v2` library which provides automatic transaction retry for serialization conflicts. For write operations requiring explicit transaction control, tools can use the `ExecuteTxWithRetry` method.
|
||||
|
||||
### Multi-Region Deployments
|
||||
|
||||
CockroachDB supports multi-region deployments with automatic data distribution. Configure your cluster's regions and survival goals separately from the Toolbox configuration. The source will connect to any node in the cluster.
|
||||
|
||||
### Connection Pooling
|
||||
|
||||
The source maintains a connection pool to the CockroachDB cluster. The pool automatically handles:
|
||||
- Load balancing across cluster nodes
|
||||
- Connection retry with exponential backoff
|
||||
- Health checking of connections
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### SSL/TLS Errors
|
||||
|
||||
If you encounter "server requires encryption" errors:
|
||||
|
||||
1. For CockroachDB Cloud, ensure `sslmode` is set to `require` or higher:
|
||||
```yaml
|
||||
queryParams:
|
||||
sslmode: require
|
||||
```
|
||||
|
||||
2. For certificate verification, download your cluster's root certificate and configure:
|
||||
```yaml
|
||||
queryParams:
|
||||
sslmode: verify-full
|
||||
sslrootcert: /path/to/ca.crt
|
||||
```
|
||||
|
||||
### Connection Timeouts
|
||||
|
||||
If experiencing connection timeouts:
|
||||
|
||||
1. Check network connectivity to the CockroachDB cluster
|
||||
2. Verify firewall rules allow connections on port 26257
|
||||
3. For CockroachDB Cloud, ensure IP allowlisting is configured
|
||||
4. Increase `maxRetries` or `retryBaseDelay` if needed
|
||||
|
||||
### Transaction Retry Errors
|
||||
|
||||
CockroachDB may encounter serializable transaction conflicts. The integration automatically handles these retries using the cockroach-go library. If you see retry-related errors, check:
|
||||
|
||||
1. Database load and contention
|
||||
2. Query patterns that might cause conflicts
|
||||
3. Consider using `SELECT FOR UPDATE` for explicit locking
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [CockroachDB Documentation](https://www.cockroachlabs.com/docs/)
|
||||
- [CockroachDB Best Practices](https://www.cockroachlabs.com/docs/stable/performance-best-practices-overview.html)
|
||||
- [Multi-Region Capabilities](https://www.cockroachlabs.com/docs/stable/multiregion-overview.html)
|
||||
- [Connection Parameters](https://www.cockroachlabs.com/docs/stable/connection-parameters.html)
|
||||
@@ -87,28 +87,41 @@ using a TNS (Transparent Network Substrate) alias.
|
||||
|
||||
## Examples
|
||||
|
||||
This example demonstrates the four connection methods you could choose from:
|
||||
### 1. Basic Connection (Host, Port, and Service Name)
|
||||
|
||||
```yaml
|
||||
kind: sources
|
||||
name: my-oracle-source
|
||||
type: oracle
|
||||
sources:
|
||||
my-oracle-source:
|
||||
kind: oracle
|
||||
host: 127.0.0.1
|
||||
port: 1521
|
||||
serviceName: XEPDB1
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
```
|
||||
|
||||
# --- Choose one connection method ---
|
||||
# 1. Host, Port, and Service Name
|
||||
host: 127.0.0.1
|
||||
port: 1521
|
||||
serviceName: XEPDB1
|
||||
### 2. Direct Connection String
|
||||
|
||||
# 2. Direct Connection String
|
||||
connectionString: "127.0.0.1:1521/XEPDB1"
|
||||
```yaml
|
||||
sources:
|
||||
my-oracle-source:
|
||||
kind: oracle
|
||||
connectionString: "127.0.0.1:1521/XEPDB1"
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
```
|
||||
|
||||
# 3. TNS Alias (requires tnsnames.ora)
|
||||
tnsAlias: "MY_DB_ALIAS"
|
||||
tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var
|
||||
### 3. TNS Alias (requires tnsnames.ora)
|
||||
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
```yaml
|
||||
sources:
|
||||
my-oracle-source:
|
||||
kind: oracle
|
||||
tnsAlias: "MY_DB_ALIAS"
|
||||
tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
useOCI: true # tnsAlias requires useOCI to be true
|
||||
|
||||
# Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client)
|
||||
```
|
||||
@@ -168,3 +181,4 @@ instead of hardcoding your secrets into the configuration file.
|
||||
| tnsAlias | string | false | A TNS alias from a `tnsnames.ora` file. Use as an alternative to `host`/`port` or `connectionString`. |
|
||||
| tnsAdmin | string | false | Path to the directory containing the `tnsnames.ora` file. This overrides the `TNS_ADMIN` environment variable if it is set. |
|
||||
| useOCI | bool | false | If true, uses the OCI-based driver (godror) which supports Oracle Wallet/Kerberos but requires the Oracle Instant Client libraries to be installed. Defaults to false (pure Go driver). |
|
||||
| walletLocation | string | false | Path to the directory containing the wallet files for the pure Go driver (`useOCI: false`). |
|
||||
|
||||
8
docs/en/resources/tools/cloudloggingadmin/_index.md
Normal file
8
docs/en/resources/tools/cloudloggingadmin/_index.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
title: "Cloud Logging Admin"
|
||||
linkTitle: "Cloud Logging Admin"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Tools that work with Cloud Logging Admin Sources.
|
||||
---
|
||||
273
docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md
Normal file
273
docs/en/resources/tools/cockroachdb/cockroachdb-execute-sql.md
Normal file
@@ -0,0 +1,273 @@
|
||||
---
|
||||
title: "cockroachdb-execute-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute ad-hoc SQL statements against a CockroachDB database.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A `cockroachdb-execute-sql` tool executes ad-hoc SQL statements against a CockroachDB database. This tool is designed for interactive workflows where the SQL query is provided dynamically at runtime, making it ideal for developer assistance and exploratory data analysis.
|
||||
|
||||
The tool takes a single `sql` parameter containing the SQL statement to execute and returns the query results.
|
||||
|
||||
> **Note:** This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents. For production use cases with predefined queries, use [cockroachdb-sql](./cockroachdb-sql.md) instead.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
type: cockroachdb-execute-sql
|
||||
source: my_cockroachdb
|
||||
description: Execute any SQL statement against the CockroachDB database
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Simple SELECT Query
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT * FROM users LIMIT 10"
|
||||
}
|
||||
```
|
||||
|
||||
### Query with Aggregations
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT category, COUNT(*) as count, SUM(amount) as total FROM expenses GROUP BY category ORDER BY total DESC"
|
||||
}
|
||||
```
|
||||
|
||||
### Database Introspection
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW TABLES"
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW COLUMNS FROM expenses"
|
||||
}
|
||||
```
|
||||
|
||||
### Multi-Region Information
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW REGIONS FROM DATABASE defaultdb"
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW ZONE CONFIGURATIONS"
|
||||
}
|
||||
```
|
||||
|
||||
## CockroachDB-Specific Features
|
||||
|
||||
### Check Cluster Version
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT version()"
|
||||
}
|
||||
```
|
||||
|
||||
### View Node Status
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT node_id, address, locality, is_live FROM crdb_internal.gossip_nodes"
|
||||
}
|
||||
```
|
||||
|
||||
### Check Replication Status
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SELECT range_id, start_key, end_key, replicas, lease_holder FROM crdb_internal.ranges LIMIT 10"
|
||||
}
|
||||
```
|
||||
|
||||
### View Table Regions
|
||||
|
||||
```json
|
||||
{
|
||||
"sql": "SHOW REGIONS FROM TABLE expenses"
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-execute-sql` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts a single runtime parameter:
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `sql` | string | The SQL statement to execute |
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use for Exploration, Not Production
|
||||
|
||||
This tool is ideal for:
|
||||
- Interactive database exploration
|
||||
- Ad-hoc analysis and reporting
|
||||
- Debugging and troubleshooting
|
||||
- Schema inspection
|
||||
|
||||
For production use cases, use [cockroachdb-sql](./cockroachdb-sql.md) with parameterized queries.
|
||||
|
||||
### Be Cautious with Data Modification
|
||||
|
||||
While this tool can execute any SQL statement, be careful with:
|
||||
- `INSERT`, `UPDATE`, `DELETE` statements
|
||||
- `DROP` or `ALTER` statements
|
||||
- Schema changes in production
|
||||
|
||||
### Use LIMIT for Large Results
|
||||
|
||||
Always use `LIMIT` clauses when exploring data:
|
||||
|
||||
```sql
|
||||
SELECT * FROM large_table LIMIT 100
|
||||
```
|
||||
|
||||
### Leverage CockroachDB's SQL Extensions
|
||||
|
||||
CockroachDB supports PostgreSQL syntax plus extensions:
|
||||
|
||||
```sql
|
||||
-- Show database survival goal
|
||||
SHOW SURVIVAL GOAL FROM DATABASE defaultdb;
|
||||
|
||||
-- View zone configurations
|
||||
SHOW ZONE CONFIGURATION FOR TABLE expenses;
|
||||
|
||||
-- Check table localities
|
||||
SHOW CREATE TABLE expenses;
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool will return descriptive errors for:
|
||||
- **Syntax errors**: Invalid SQL syntax
|
||||
- **Permission errors**: Insufficient user privileges
|
||||
- **Connection errors**: Network or authentication issues
|
||||
- **Runtime errors**: Constraint violations, type mismatches, etc.
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### SQL Injection Risk
|
||||
|
||||
Since this tool executes arbitrary SQL, it should only be used with:
|
||||
- Trusted users in interactive sessions
|
||||
- Human-in-the-loop workflows
|
||||
- Development and testing environments
|
||||
|
||||
Never expose this tool directly to end users without proper authorization controls.
|
||||
|
||||
### Use Authentication
|
||||
|
||||
Configure the `authRequired` field to restrict access:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
execute_sql:
|
||||
type: cockroachdb-execute-sql
|
||||
source: my_cockroachdb
|
||||
description: Execute SQL statements
|
||||
authRequired:
|
||||
- my-auth-service
|
||||
```
|
||||
|
||||
### Read-Only Users
|
||||
|
||||
For safer exploration, create read-only database users:
|
||||
|
||||
```sql
|
||||
CREATE USER readonly_user;
|
||||
GRANT SELECT ON DATABASE defaultdb TO readonly_user;
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Database Administration
|
||||
|
||||
```sql
|
||||
-- View database size
|
||||
SELECT
|
||||
table_name,
|
||||
pg_size_pretty(pg_total_relation_size(table_name::regclass)) AS size
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
ORDER BY pg_total_relation_size(table_name::regclass) DESC;
|
||||
```
|
||||
|
||||
### Performance Analysis
|
||||
|
||||
```sql
|
||||
-- Find slow queries
|
||||
SELECT query, count, mean_latency
|
||||
FROM crdb_internal.statement_statistics
|
||||
WHERE mean_latency > INTERVAL '1 second'
|
||||
ORDER BY mean_latency DESC
|
||||
LIMIT 10;
|
||||
```
|
||||
|
||||
### Data Quality Checks
|
||||
|
||||
```sql
|
||||
-- Find NULL values
|
||||
SELECT COUNT(*) as null_count
|
||||
FROM expenses
|
||||
WHERE description IS NULL OR amount IS NULL;
|
||||
|
||||
-- Find duplicates
|
||||
SELECT user_id, email, COUNT(*) as count
|
||||
FROM users
|
||||
GROUP BY user_id, email
|
||||
HAVING COUNT(*) > 1;
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - For parameterized, production-ready queries
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB SQL Reference](https://www.cockroachlabs.com/docs/stable/sql-statements.html) - Official SQL documentation
|
||||
305
docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md
Normal file
305
docs/en/resources/tools/cockroachdb/cockroachdb-list-schemas.md
Normal file
@@ -0,0 +1,305 @@
|
||||
---
|
||||
title: "cockroachdb-list-schemas"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
List schemas in a CockroachDB database.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-list-schemas` tool retrieves a list of schemas (namespaces) in a CockroachDB database. Schemas are used to organize database objects such as tables, views, and functions into logical groups.
|
||||
|
||||
This tool is useful for:
|
||||
- Understanding database organization
|
||||
- Discovering available schemas
|
||||
- Multi-tenant application analysis
|
||||
- Schema-level access control planning
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
list_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: List all schemas in the database
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-list-schemas` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Output Structure
|
||||
|
||||
The tool returns a list of schemas with the following information:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"catalog_name": "defaultdb",
|
||||
"schema_name": "public",
|
||||
"is_user_defined": true
|
||||
},
|
||||
{
|
||||
"catalog_name": "defaultdb",
|
||||
"schema_name": "analytics",
|
||||
"is_user_defined": true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `catalog_name` | string | The database (catalog) name |
|
||||
| `schema_name` | string | The schema name |
|
||||
| `is_user_defined` | boolean | Whether this is a user-created schema (excludes system schemas) |
|
||||
|
||||
## Usage Example
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
No parameters are required. The tool automatically lists all user-defined schemas.
|
||||
|
||||
## Default Schemas
|
||||
|
||||
CockroachDB includes several standard schemas:
|
||||
|
||||
- **`public`**: The default schema for user objects
|
||||
- **`pg_catalog`**: PostgreSQL system catalog (excluded from results)
|
||||
- **`information_schema`**: SQL standard metadata views (excluded from results)
|
||||
- **`crdb_internal`**: CockroachDB internal metadata (excluded from results)
|
||||
- **`pg_extension`**: PostgreSQL extension objects (excluded from results)
|
||||
|
||||
The tool filters out system schemas and only returns user-defined schemas.
|
||||
|
||||
## Schema Management in CockroachDB
|
||||
|
||||
### Creating Schemas
|
||||
|
||||
```sql
|
||||
CREATE SCHEMA analytics;
|
||||
```
|
||||
|
||||
### Using Schemas
|
||||
|
||||
```sql
|
||||
-- Create table in specific schema
|
||||
CREATE TABLE analytics.revenue (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
amount DECIMAL(10,2),
|
||||
date DATE
|
||||
);
|
||||
|
||||
-- Query from specific schema
|
||||
SELECT * FROM analytics.revenue;
|
||||
```
|
||||
|
||||
### Schema Search Path
|
||||
|
||||
The search path determines which schemas are searched for unqualified object names:
|
||||
|
||||
```sql
|
||||
-- Show current search path
|
||||
SHOW search_path;
|
||||
|
||||
-- Set search path
|
||||
SET search_path = analytics, public;
|
||||
```
|
||||
|
||||
## Multi-Tenant Applications
|
||||
|
||||
Schemas are commonly used for multi-tenant applications:
|
||||
|
||||
```sql
|
||||
-- Create schema per tenant
|
||||
CREATE SCHEMA tenant_acme;
|
||||
CREATE SCHEMA tenant_globex;
|
||||
|
||||
-- Create same table structure in each schema
|
||||
CREATE TABLE tenant_acme.orders (...);
|
||||
CREATE TABLE tenant_globex.orders (...);
|
||||
```
|
||||
|
||||
The `cockroachdb-list-schemas` tool helps discover all tenant schemas:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tenants:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List all tenant schemas in the database.
|
||||
Each schema represents a separate tenant's data namespace.
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use Schemas for Organization
|
||||
|
||||
Group related tables into schemas:
|
||||
|
||||
```sql
|
||||
CREATE SCHEMA sales;
|
||||
CREATE SCHEMA inventory;
|
||||
CREATE SCHEMA hr;
|
||||
|
||||
CREATE TABLE sales.orders (...);
|
||||
CREATE TABLE inventory.products (...);
|
||||
CREATE TABLE hr.employees (...);
|
||||
```
|
||||
|
||||
### Schema Naming Conventions
|
||||
|
||||
Use clear, descriptive schema names:
|
||||
- Lowercase names
|
||||
- Use underscores for multi-word names
|
||||
- Avoid reserved keywords
|
||||
- Use prefixes for grouped schemas (e.g., `tenant_`, `app_`)
|
||||
|
||||
### Schema-Level Permissions
|
||||
|
||||
Schemas enable fine-grained access control:
|
||||
|
||||
```sql
|
||||
-- Grant access to specific schema
|
||||
GRANT USAGE ON SCHEMA analytics TO analyst_role;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA analytics TO analyst_role;
|
||||
|
||||
-- Revoke access
|
||||
REVOKE ALL ON SCHEMA hr FROM public;
|
||||
```
|
||||
|
||||
## Integration with Other Tools
|
||||
|
||||
### Combined with List Tables
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: List all schemas first
|
||||
|
||||
list_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List tables in the database.
|
||||
Use list_schemas first to understand schema organization.
|
||||
```
|
||||
|
||||
### Schema Discovery Workflow
|
||||
|
||||
1. Call `cockroachdb-list-schemas` to discover schemas
|
||||
2. Call `cockroachdb-list-tables` to see tables in each schema
|
||||
3. Generate queries using fully qualified names: `schema.table`
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Discover Database Structure
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
discover_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Discover how the database is organized into schemas.
|
||||
Use this to understand the logical grouping of tables.
|
||||
```
|
||||
|
||||
### Multi-Tenant Analysis
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tenant_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
List all tenant schemas (each tenant has their own schema).
|
||||
Schema names follow the pattern: tenant_<company_name>
|
||||
```
|
||||
|
||||
### Schema Migration Planning
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
audit_schemas:
|
||||
type: cockroachdb-list-schemas
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Audit existing schemas before migration.
|
||||
Identifies all schemas that need to be migrated.
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool handles common errors:
|
||||
- **Connection errors**: Returns connection failure details
|
||||
- **Permission errors**: Returns error if user lacks USAGE privilege
|
||||
- **Empty results**: Returns empty array if no user schemas exist
|
||||
|
||||
## Permissions Required
|
||||
|
||||
To list schemas, the user needs:
|
||||
- `CONNECT` privilege on the database
|
||||
- No specific schema privileges required for listing
|
||||
|
||||
To query objects within schemas, the user needs:
|
||||
- `USAGE` privilege on the schema
|
||||
- Appropriate object privileges (SELECT, INSERT, etc.)
|
||||
|
||||
## CockroachDB-Specific Features
|
||||
|
||||
### System Schemas
|
||||
|
||||
CockroachDB includes PostgreSQL-compatible system schemas plus CockroachDB-specific ones:
|
||||
|
||||
- `crdb_internal.*`: CockroachDB internal metadata and statistics
|
||||
- `pg_catalog.*`: PostgreSQL system catalog
|
||||
- `information_schema.*`: SQL standard information schema
|
||||
|
||||
These are automatically filtered from the results.
|
||||
|
||||
### User-Defined Flag
|
||||
|
||||
The `is_user_defined` field helps distinguish:
|
||||
- `true`: User-created schemas
|
||||
- `false`: System schemas (already filtered out)
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Official documentation
|
||||
344
docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md
Normal file
344
docs/en/resources/tools/cockroachdb/cockroachdb-list-tables.md
Normal file
@@ -0,0 +1,344 @@
|
||||
---
|
||||
title: "cockroachdb-list-tables"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
List tables in a CockroachDB database with schema details.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-list-tables` tool retrieves a list of tables from a CockroachDB database. It provides detailed information about table structure, including columns, constraints, indexes, and foreign key relationships.
|
||||
|
||||
This tool is useful for:
|
||||
- Database schema discovery
|
||||
- Understanding table relationships
|
||||
- Generating context for AI-powered database queries
|
||||
- Documentation and analysis
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
list_all_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: List all user tables in the database with their structure
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-list-tables` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description for the LLM |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
The tool accepts optional runtime parameters:
|
||||
|
||||
| Parameter | Type | Default | Description |
|
||||
|-----------|------|---------|-------------|
|
||||
| `table_names` | array | all tables | List of specific table names to retrieve |
|
||||
| `output_format` | string | "detailed" | Output format: "simple" or "detailed" |
|
||||
|
||||
## Output Formats
|
||||
|
||||
### Simple Format
|
||||
|
||||
Returns basic table information:
|
||||
- Table name
|
||||
- Row count estimate
|
||||
- Size information
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users"],
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
### Detailed Format (Default)
|
||||
|
||||
Returns comprehensive table information:
|
||||
- Table name and schema
|
||||
- All columns with types and constraints
|
||||
- Primary keys
|
||||
- Foreign keys and relationships
|
||||
- Indexes
|
||||
- Check constraints
|
||||
- Table size and row counts
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### List All Tables
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
### List Specific Tables
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders", "expenses"]
|
||||
}
|
||||
```
|
||||
|
||||
### Simple Output
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
## Output Structure
|
||||
|
||||
### Simple Format Output
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"estimated_rows": 1000,
|
||||
"size": "128 KB"
|
||||
}
|
||||
```
|
||||
|
||||
### Detailed Format Output
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"schema": "public",
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"type": "UUID",
|
||||
"nullable": false,
|
||||
"default": "gen_random_uuid()"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"type": "STRING",
|
||||
"nullable": false,
|
||||
"default": null
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"type": "TIMESTAMP",
|
||||
"nullable": false,
|
||||
"default": "now()"
|
||||
}
|
||||
],
|
||||
"primary_key": ["id"],
|
||||
"indexes": [
|
||||
{
|
||||
"name": "users_pkey",
|
||||
"columns": ["id"],
|
||||
"unique": true,
|
||||
"primary": true
|
||||
},
|
||||
{
|
||||
"name": "users_email_idx",
|
||||
"columns": ["email"],
|
||||
"unique": true,
|
||||
"primary": false
|
||||
}
|
||||
],
|
||||
"foreign_keys": [],
|
||||
"constraints": [
|
||||
{
|
||||
"name": "users_email_check",
|
||||
"type": "CHECK",
|
||||
"definition": "email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}$'"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## CockroachDB-Specific Information
|
||||
|
||||
### UUID Primary Keys
|
||||
|
||||
The tool recognizes CockroachDB's recommended UUID primary key pattern:
|
||||
|
||||
```sql
|
||||
CREATE TABLE users (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
...
|
||||
);
|
||||
```
|
||||
|
||||
### Multi-Region Tables
|
||||
|
||||
For multi-region tables, the output includes locality information:
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "users",
|
||||
"locality": "REGIONAL BY ROW",
|
||||
"regions": ["us-east-1", "us-west-2", "eu-west-1"]
|
||||
}
|
||||
```
|
||||
|
||||
### Interleaved Tables
|
||||
|
||||
The tool shows parent-child relationships for interleaved tables (legacy feature):
|
||||
|
||||
```json
|
||||
{
|
||||
"table_name": "order_items",
|
||||
"interleaved_in": "orders"
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use for Schema Discovery
|
||||
|
||||
The tool is ideal for helping AI assistants understand your database structure:
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
discover_schema:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Use this tool first to understand the database schema before generating queries.
|
||||
It shows all tables, their columns, data types, and relationships.
|
||||
```
|
||||
|
||||
### Filter Large Schemas
|
||||
|
||||
For databases with many tables, specify relevant tables:
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users", "orders", "products"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
### Use Simple Format for Overviews
|
||||
|
||||
When you need just table names and sizes:
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
## Excluded Tables
|
||||
|
||||
The tool automatically excludes system tables and schemas:
|
||||
- `pg_catalog.*` - PostgreSQL system catalog
|
||||
- `information_schema.*` - SQL standard information schema
|
||||
- `crdb_internal.*` - CockroachDB internal tables
|
||||
- `pg_extension.*` - PostgreSQL extension tables
|
||||
|
||||
Only user-created tables in the public schema (and other user schemas) are returned.
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool handles common errors:
|
||||
- **Table not found**: Returns empty result for non-existent tables
|
||||
- **Permission errors**: Returns error if user lacks SELECT privileges
|
||||
- **Connection errors**: Returns connection failure details
|
||||
|
||||
## Integration with AI Assistants
|
||||
|
||||
### Prompt Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
list_tables:
|
||||
type: cockroachdb-list-tables
|
||||
source: my_cockroachdb
|
||||
description: |
|
||||
Lists all tables in the database with detailed schema information.
|
||||
Use this tool to understand:
|
||||
- What tables exist
|
||||
- What columns each table has
|
||||
- Data types and constraints
|
||||
- Relationships between tables (foreign keys)
|
||||
- Available indexes
|
||||
|
||||
Always call this tool before generating SQL queries to ensure
|
||||
you use correct table and column names.
|
||||
```
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Generate Context for Queries
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
This provides comprehensive schema information that helps AI assistants generate accurate SQL queries.
|
||||
|
||||
### Analyze Table Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"table_names": ["users"],
|
||||
"output_format": "detailed"
|
||||
}
|
||||
```
|
||||
|
||||
Perfect for understanding a specific table's structure, constraints, and relationships.
|
||||
|
||||
### Quick Schema Overview
|
||||
|
||||
```json
|
||||
{
|
||||
"output_format": "simple"
|
||||
}
|
||||
```
|
||||
|
||||
Gets a quick list of tables with basic statistics.
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
- **Simple format** is faster for large databases
|
||||
- **Detailed format** queries system tables extensively
|
||||
- Specifying `table_names` reduces query time
|
||||
- Results are fetched in a single query for efficiency
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-sql](./cockroachdb-sql.md) - Execute parameterized queries
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - Execute ad-hoc SQL
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
- [CockroachDB Schema Design](https://www.cockroachlabs.com/docs/stable/schema-design-overview.html) - Best practices
|
||||
291
docs/en/resources/tools/cockroachdb/cockroachdb-sql.md
Normal file
291
docs/en/resources/tools/cockroachdb/cockroachdb-sql.md
Normal file
@@ -0,0 +1,291 @@
|
||||
---
|
||||
title: "cockroachdb-sql"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Execute parameterized SQL queries in CockroachDB.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
The `cockroachdb-sql` tool allows you to execute parameterized SQL queries against a CockroachDB database. This tool supports prepared statements with parameter binding, template parameters for dynamic query construction, and automatic transaction retry for resilience against serialization conflicts.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my_cockroachdb:
|
||||
type: cockroachdb
|
||||
host: your-cluster.cockroachlabs.cloud
|
||||
port: "26257"
|
||||
user: myuser
|
||||
password: mypassword
|
||||
database: defaultdb
|
||||
queryParams:
|
||||
sslmode: require
|
||||
|
||||
tools:
|
||||
get_user_orders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get all orders for a specific user
|
||||
statement: |
|
||||
SELECT o.id, o.order_date, o.total_amount, o.status
|
||||
FROM orders o
|
||||
WHERE o.user_id = $1
|
||||
ORDER BY o.order_date DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The UUID of the user
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Required Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `type` | string | Must be `cockroachdb-sql` |
|
||||
| `source` | string | Name of the CockroachDB source to use |
|
||||
| `description` | string | Human-readable description of what the tool does |
|
||||
| `statement` | string | The SQL query to execute |
|
||||
|
||||
### Optional Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `parameters` | array | List of parameter definitions for the query |
|
||||
| `templateParameters` | array | List of template parameters for dynamic query construction |
|
||||
| `authRequired` | array | List of authentication services required |
|
||||
|
||||
## Parameters
|
||||
|
||||
Parameters allow you to safely pass values into your SQL queries using prepared statements. CockroachDB uses PostgreSQL-style parameter placeholders: `$1`, `$2`, etc.
|
||||
|
||||
### Parameter Types
|
||||
|
||||
- `string`: Text values
|
||||
- `number`: Numeric values (integers or decimals)
|
||||
- `boolean`: True/false values
|
||||
- `array`: Array of values
|
||||
|
||||
### Example with Multiple Parameters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
filter_expenses:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Filter expenses by category and date range
|
||||
statement: |
|
||||
SELECT id, description, amount, category, expense_date
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
AND category = $2
|
||||
AND expense_date >= $3
|
||||
AND expense_date <= $4
|
||||
ORDER BY expense_date DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
- name: category
|
||||
type: string
|
||||
description: Expense category (e.g., "Food", "Transport")
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
- name: end_date
|
||||
type: string
|
||||
description: End date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
## Template Parameters
|
||||
|
||||
Template parameters enable dynamic query construction by replacing placeholders in the SQL statement before parameter binding. This is useful for dynamic table names, column names, or query structure.
|
||||
|
||||
### Example with Template Parameters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_column_data:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get data from a specific column
|
||||
statement: |
|
||||
SELECT {{column_name}}
|
||||
FROM {{table_name}}
|
||||
WHERE user_id = $1
|
||||
LIMIT 100
|
||||
templateParameters:
|
||||
- name: table_name
|
||||
type: string
|
||||
description: The table to query
|
||||
- name: column_name
|
||||
type: string
|
||||
description: The column to retrieve
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Use UUID Primary Keys
|
||||
|
||||
CockroachDB performs best with UUID primary keys to avoid transaction hotspots:
|
||||
|
||||
```sql
|
||||
CREATE TABLE orders (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
user_id UUID NOT NULL,
|
||||
order_date TIMESTAMP DEFAULT now(),
|
||||
total_amount DECIMAL(10,2)
|
||||
);
|
||||
```
|
||||
|
||||
### Use Indexes for Performance
|
||||
|
||||
Create indexes on frequently queried columns:
|
||||
|
||||
```sql
|
||||
CREATE INDEX idx_orders_user_id ON orders(user_id);
|
||||
CREATE INDEX idx_orders_date ON orders(order_date DESC);
|
||||
```
|
||||
|
||||
### Use JOINs Efficiently
|
||||
|
||||
CockroachDB supports standard SQL JOINs. Keep joins efficient by:
|
||||
- Adding appropriate indexes
|
||||
- Using UUIDs for foreign keys
|
||||
- Limiting result sets with WHERE clauses
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
get_user_with_orders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get user details with their recent orders
|
||||
statement: |
|
||||
SELECT u.name, u.email, o.id as order_id, o.order_date, o.total_amount
|
||||
FROM users u
|
||||
LEFT JOIN orders o ON u.id = o.user_id
|
||||
WHERE u.id = $1
|
||||
ORDER BY o.order_date DESC
|
||||
LIMIT 10
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
### Handle NULL Values
|
||||
|
||||
Use COALESCE or NULL checks when dealing with nullable columns:
|
||||
|
||||
```sql
|
||||
SELECT id, description, COALESCE(notes, 'No notes') as notes
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The tool automatically handles:
|
||||
- **Connection errors**: Retried with exponential backoff
|
||||
- **Serialization conflicts**: Automatically retried using cockroach-go library
|
||||
- **Invalid parameters**: Returns descriptive error messages
|
||||
- **SQL syntax errors**: Returns database error details
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Aggregations
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
expense_summary:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get expense summary by category for a user
|
||||
statement: |
|
||||
SELECT
|
||||
category,
|
||||
COUNT(*) as count,
|
||||
SUM(amount) as total_amount,
|
||||
AVG(amount) as avg_amount
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
AND expense_date >= $2
|
||||
GROUP BY category
|
||||
ORDER BY total_amount DESC
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
### Window Functions
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
running_total:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Get running total of expenses
|
||||
statement: |
|
||||
SELECT
|
||||
expense_date,
|
||||
amount,
|
||||
SUM(amount) OVER (ORDER BY expense_date) as running_total
|
||||
FROM expenses
|
||||
WHERE user_id = $1
|
||||
ORDER BY expense_date
|
||||
parameters:
|
||||
- name: user_id
|
||||
type: string
|
||||
description: The user's UUID
|
||||
```
|
||||
|
||||
### Common Table Expressions (CTEs)
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
top_spenders:
|
||||
type: cockroachdb-sql
|
||||
source: my_cockroachdb
|
||||
description: Find top spending users
|
||||
statement: |
|
||||
WITH user_totals AS (
|
||||
SELECT
|
||||
user_id,
|
||||
SUM(amount) as total_spent
|
||||
FROM expenses
|
||||
WHERE expense_date >= $1
|
||||
GROUP BY user_id
|
||||
)
|
||||
SELECT
|
||||
u.name,
|
||||
u.email,
|
||||
ut.total_spent
|
||||
FROM user_totals ut
|
||||
JOIN users u ON ut.user_id = u.id
|
||||
ORDER BY ut.total_spent DESC
|
||||
LIMIT 10
|
||||
parameters:
|
||||
- name: start_date
|
||||
type: string
|
||||
description: Start date in YYYY-MM-DD format
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [cockroachdb-execute-sql](./cockroachdb-execute-sql.md) - For ad-hoc SQL execution
|
||||
- [cockroachdb-list-tables](./cockroachdb-list-tables.md) - List tables in the database
|
||||
- [cockroachdb-list-schemas](./cockroachdb-list-schemas.md) - List database schemas
|
||||
- [CockroachDB Source](../../sources/cockroachdb.md) - Source configuration reference
|
||||
47
docs/en/resources/tools/looker/looker-validate-project.md
Normal file
47
docs/en/resources/tools/looker/looker-validate-project.md
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
title: "looker-validate-project"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors.
|
||||
aliases:
|
||||
- /resources/tools/looker-validate-project
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
A "looker-validate-project" tool checks the syntax of a LookML project and reports any errors.
|
||||
|
||||
It's compatible with the following sources:
|
||||
|
||||
- [looker](../../sources/looker.md)
|
||||
|
||||
`looker-validate-project` accepts a project_id parameter.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
validate_project:
|
||||
kind: looker-validate-project
|
||||
source: looker-source
|
||||
description: |
|
||||
This tool checks a LookML project for syntax errors.
|
||||
|
||||
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
|
||||
|
||||
Parameters:
|
||||
- project_id (required): The unique ID of the LookML project.
|
||||
|
||||
Output:
|
||||
A list of error details including the file path and line number, and also a list of models
|
||||
that are not currently valid due to LookML errors.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|:--------:|:------------:|----------------------------------------------------|
|
||||
| kind | string | true | Must be "looker-validate-project". |
|
||||
| source | string | true | Name of the source Looker instance. |
|
||||
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||
@@ -771,7 +771,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.26.0\" # x-release-please-version\n",
|
||||
"version = \"0.27.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.26.0"
|
||||
export VERSION="0.27.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.26.0\" # x-release-please-version\n",
|
||||
"version = \"0.27.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.27.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
7
docs/en/samples/oracle/_index.md
Normal file
7
docs/en/samples/oracle/_index.md
Normal file
@@ -0,0 +1,7 @@
|
||||
---
|
||||
title: "OracleDB"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
How to get started with Toolbox using Oracle Database.
|
||||
---
|
||||
@@ -1,14 +1,18 @@
|
||||
---
|
||||
title: "Pre and Post processing"
|
||||
title: "Pre- and Post- Processing"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Pre and Post processing in GenAI applications.
|
||||
Intercept and modify interactions between the agent and its tools either before or after a tool is executed.
|
||||
---
|
||||
|
||||
Pre and post processing allow developers to intercept and modify interactions between the agent and its tools or the user.
|
||||
Pre- and post- processing allow developers to intercept and modify interactions between the agent and its tools or the user.
|
||||
|
||||
> **Note**: These capabilities are typically features of **orchestration frameworks** (like LangChain, LangGraph, or Agent Builder) rather than the Toolbox SDK itself. However, Toolbox tools are designed to fully leverage these framework capabilities to support robust, secure, and compliant agent architectures.
|
||||
{{< notice note >}}
|
||||
|
||||
These capabilities are typically features of **orchestration frameworks** (like LangChain, LangGraph, or Agent Builder) rather than the Toolbox SDK itself. However, Toolbox tools are designed to fully leverage these framework capabilities to support robust, secure, and compliant agent architectures.
|
||||
|
||||
{{< /notice >}}
|
||||
|
||||
## Types of Processing
|
||||
|
||||
@@ -39,9 +43,12 @@ Wraps individual tool executions. This is best for logic specific to a single to
|
||||
- **Scope**: Intercepts the raw inputs (arguments) to a tool and its outputs.
|
||||
- **Use Cases**: Argument validation, output formatting, specific privacy rules for sensitive tools.
|
||||
|
||||
### Comparison with Other Levels
|
||||
### Other Levels
|
||||
|
||||
It is helpful to understand how tool-level processing differs from other scopes:
|
||||
|
||||
- **Model Level**: Intercepts individual calls to the LLM (prompts and responses). Unlike tool-level, this applies globally to all text sent/received, making it better for global PII redaction or token tracking.
|
||||
- **Agent Level**: Wraps the high-level execution loop (e.g., a "turn" in the conversation). Unlike tool-level, this envelopes the entire turn (user input to final response), making it suitable for session management or end-to-end auditing.
|
||||
|
||||
|
||||
## Samples
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
Final Client Response:
|
||||
AI:
|
||||
Loyalty Points
|
||||
POLICY CHECK: Intercepting 'update-hotel'
|
||||
@@ -1,31 +1,40 @@
|
||||
---
|
||||
title: "(Python) Pre and post processing"
|
||||
title: "Python"
|
||||
type: docs
|
||||
weight: 4
|
||||
weight: 1
|
||||
description: >
|
||||
How to add pre and post processing to your Python toolbox applications.
|
||||
How to add pre- and post- processing to your Agents using Python.
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
This tutorial assumes that you have set up a basic toolbox application as described in the [local quickstart](../../getting-started/local_quickstart).
|
||||
This tutorial assumes that you have set up Toolbox with a basic agent as described in the [local quickstart](../../getting-started/local_quickstart.md).
|
||||
|
||||
This guide demonstrates how to implement these patterns in your Toolbox applications.
|
||||
|
||||
## Python
|
||||
## Implementation
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{% tab header="ADK" text=true %}}
|
||||
Coming soon.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Langchain" text=true %}}
|
||||
The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre and post processing for tool calls.
|
||||
The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre- and post- processing for tool calls.
|
||||
|
||||
```py
|
||||
{{< include "python/langchain/agent.py" >}}
|
||||
```
|
||||
|
||||
For more information, see the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks).
|
||||
You can also add model-level (`wrap_model`) and agent-level (`before_agent`, `after_agent`) hooks to intercept messages at different stages of the execution loop. See the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks) for details on these additional hook types.
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Results
|
||||
|
||||
The output should look similar to the following. Note that exact responses may vary due to the non-deterministic nature of LLMs and differences between orchestration frameworks.
|
||||
|
||||
```
|
||||
AI: Booking Confirmed! You earned 500 Loyalty Points with this stay.
|
||||
|
||||
AI: Error: Maximum stay duration is 14 days.
|
||||
```
|
||||
|
||||
@@ -1,3 +1,18 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
# This file makes the 'pre_post_processing/python' directory a Python package.
|
||||
|
||||
# You can include any package-level initialization logic here if needed.
|
||||
|
||||
@@ -1,110 +0,0 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional, Awaitable
|
||||
|
||||
from google.adk import Agent
|
||||
from google.adk.apps import App
|
||||
from google.adk.runners import Runner
|
||||
from google.adk.sessions.in_memory_session_service import InMemorySessionService
|
||||
from google.genai import types
|
||||
from toolbox_adk import ToolboxToolset, CredentialStrategy
|
||||
|
||||
system_prompt = """
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
"""
|
||||
|
||||
async def before_tool_callback(context: Any, args: Dict[str, Any]):
|
||||
"""Enforces business logic: Max stay duration is 14 days."""
|
||||
tool_name = getattr(context, "name", "unknown_tool")
|
||||
print(f"POLICY CHECK: Intercepting '{tool_name}'")
|
||||
|
||||
if tool_name == "update-hotel" or ("checkin_date" in args and "checkout_date" in args):
|
||||
try:
|
||||
start = datetime.fromisoformat(args["checkin_date"])
|
||||
end = datetime.fromisoformat(args["checkout_date"])
|
||||
if (end - start).days > 14:
|
||||
print("BLOCKED: Stay too long")
|
||||
raise ValueError("Error: Maximum stay duration is 14 days.")
|
||||
except ValueError as e:
|
||||
if "Maximum stay duration" in str(e): raise
|
||||
return args
|
||||
|
||||
async def after_tool_callback(context: Any, args: Dict[str, Any], result: Any, error: Optional[Exception]) -> Awaitable[Any]:
|
||||
"""Enriches response for successful bookings."""
|
||||
tool_name = getattr(context, "name", "unknown_tool")
|
||||
if error:
|
||||
print(f"[Tool-Level] Tool '{tool_name}' failed: {error}")
|
||||
return None
|
||||
|
||||
if isinstance(result, str) and "Error" not in result:
|
||||
is_booking = tool_name == "book-hotel" or "booking" in str(result).lower()
|
||||
if is_booking:
|
||||
return f"Booking Confirmed!\n You earned 500 Loyalty Points with this stay.\n\nSystem Details: {result}"
|
||||
return result
|
||||
|
||||
async def run_turn(runner: Runner, user_id: str, session_id: str, text: str):
|
||||
"""Helper to run a single turn."""
|
||||
print(f"\nUSER: '{text}'")
|
||||
response_text = ""
|
||||
async for event in runner.run_async(
|
||||
user_id=user_id, session_id=session_id,
|
||||
new_message=types.Content(role="user", parts=[types.Part(text=text)])
|
||||
):
|
||||
if event.content and event.content.parts:
|
||||
for part in event.content.parts:
|
||||
if part.text:
|
||||
response_text += part.text
|
||||
pass
|
||||
print(f"AI: {response_text}")
|
||||
|
||||
queries = [
|
||||
"Book hotel with id 3.",
|
||||
"Update my hotel with id 3 with checkin date 2025-01-18 and checkout date 2025-02-10",
|
||||
]
|
||||
|
||||
async def main():
|
||||
print("🚀 Initializing ADK Agent with Toolbox...")
|
||||
|
||||
toolset = ToolboxToolset(
|
||||
server_url="http://127.0.0.1:5000",
|
||||
toolset_name="my-toolset",
|
||||
credentials=CredentialStrategy.toolbox_identity(),
|
||||
pre_hook=before_tool_callback,
|
||||
post_hook=after_tool_callback
|
||||
)
|
||||
|
||||
app = App(
|
||||
root_agent=Agent(name='root_agent', model='gemini-2.5-flash', instruction=system_prompt, tools=[toolset]),
|
||||
name="my_agent"
|
||||
)
|
||||
runner = Runner(app=app, session_service=InMemorySessionService())
|
||||
|
||||
user_id, session_id = "test-user", "test-session"
|
||||
await runner.session_service.create_session(app_name=app.name, user_id=user_id, session_id=session_id)
|
||||
|
||||
for query in queries:
|
||||
await run_turn(runner, user_id, session_id, query)
|
||||
print("-" * 50)
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
@@ -1,2 +0,0 @@
|
||||
google-adk[toolbox]==1.23.0
|
||||
pytest==9.0.2
|
||||
@@ -23,19 +23,11 @@ ORCH_NAME = os.environ.get("ORCH_NAME")
|
||||
module_path = f"python.{ORCH_NAME}.agent"
|
||||
agent = importlib.import_module(module_path)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def golden_keywords():
|
||||
"""Loads expected keywords from the golden.txt file."""
|
||||
golden_file_path = Path(__file__).resolve().parent.parent / "golden.txt"
|
||||
if not golden_file_path.exists():
|
||||
pytest.fail(f"Golden file not found: {golden_file_path}")
|
||||
try:
|
||||
with open(golden_file_path, "r") as f:
|
||||
return [line.strip() for line in f.readlines() if line.strip()]
|
||||
except Exception as e:
|
||||
pytest.fail(f"Could not read golden.txt: {e}")
|
||||
|
||||
GOLDEN_KEYWORDS = [
|
||||
"AI:",
|
||||
"Loyalty Points",
|
||||
"POLICY CHECK: Intercepting 'update-hotel'",
|
||||
]
|
||||
|
||||
# --- Execution Tests ---
|
||||
class TestExecution:
|
||||
@@ -51,9 +43,9 @@ class TestExecution:
|
||||
"""Test that the script runs and produces no stderr."""
|
||||
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
||||
|
||||
def test_keywords_in_output(self, script_output, golden_keywords):
|
||||
def test_keywords_in_output(self, script_output):
|
||||
"""Test that expected keywords are present in the script's output."""
|
||||
output = script_output.out
|
||||
print(f"\nAgent Output:\n{output}\n")
|
||||
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
||||
missing_keywords = [kw for kw in GOLDEN_KEYWORDS if kw not in output]
|
||||
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# Empty init for package resolution
|
||||
@@ -1,17 +1,3 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
|
||||
@@ -61,7 +47,13 @@ async def enforce_business_rules(request, handler):
|
||||
except ValueError:
|
||||
pass # Ignore invalid date formats
|
||||
|
||||
return await handler(request)
|
||||
# PRE: Code here runs BEFORE the tool execution
|
||||
|
||||
# EXEC: Execute the tool (or next middleware)
|
||||
result = await handler(request)
|
||||
|
||||
# POST: Code here runs AFTER the tool execution
|
||||
return result
|
||||
|
||||
|
||||
# Post processing
|
||||
@@ -72,8 +64,12 @@ async def enrich_response(request, handler):
|
||||
Adds loyalty points information to successful bookings.
|
||||
Standardizes output format.
|
||||
"""
|
||||
# PRE: Code here runs BEFORE the tool execution
|
||||
|
||||
# EXEC: Execute the tool (or next middleware)
|
||||
result = await handler(request)
|
||||
|
||||
# POST: Code here runs AFTER the tool execution
|
||||
if isinstance(result, ToolMessage):
|
||||
content = str(result.content)
|
||||
tool_name = request.tool_call["name"]
|
||||
@@ -93,6 +89,7 @@ async def main():
|
||||
system_prompt=system_prompt,
|
||||
model=model,
|
||||
tools=tools,
|
||||
# add any pre and post processing methods
|
||||
middleware=[enforce_business_rules, enrich_response],
|
||||
)
|
||||
|
||||
@@ -102,7 +99,6 @@ async def main():
|
||||
)
|
||||
|
||||
print("-" * 50)
|
||||
print("Final Client Response:")
|
||||
last_ai_msg = response["messages"][-1].content
|
||||
print(f"AI: {last_ai_msg}")
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "mcp-toolbox-for-databases",
|
||||
"version": "0.26.0",
|
||||
"version": "0.27.0",
|
||||
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
||||
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
||||
}
|
||||
1
go.mod
1
go.mod
@@ -21,6 +21,7 @@ require (
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.30.0
|
||||
github.com/apache/cassandra-gocql-driver/v2 v2.0.0
|
||||
github.com/cenkalti/backoff/v5 v5.0.3
|
||||
github.com/cockroachdb/cockroach-go/v2 v2.4.2
|
||||
github.com/couchbase/gocb/v2 v2.11.1
|
||||
github.com/couchbase/tools-common/http v1.0.9
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0
|
||||
|
||||
4
go.sum
4
go.sum
@@ -800,6 +800,8 @@ github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWH
|
||||
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/cockroachdb/cockroach-go/v2 v2.4.2 h1:QB0ozDWQUUJ0GP8Zw63X/qHefPTCpLvtfCs6TLrPgyE=
|
||||
github.com/cockroachdb/cockroach-go/v2 v2.4.2/go.mod h1:9U179XbCx4qFWtNhc7BiWLPfuyMVQ7qdAhfrwLz1vH0=
|
||||
github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4=
|
||||
github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE=
|
||||
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||
@@ -960,6 +962,8 @@ github.com/godror/godror v0.49.6 h1:ts4ZGw8uLJ42e1D7aXmVuSrld0/lzUzmIUjuUuQOgGM=
|
||||
github.com/godror/godror v0.49.6/go.mod h1:kTMcxZzRw73RT5kn9v3JkBK4kHI6dqowHotqV72ebU8=
|
||||
github.com/godror/knownpb v0.3.0 h1:+caUdy8hTtl7X05aPl3tdL540TvCcaQA6woZQroLZMw=
|
||||
github.com/godror/knownpb v0.3.0/go.mod h1:PpTyfJwiOEAzQl7NtVCM8kdPCnp3uhxsZYIzZ5PV4zU=
|
||||
github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E=
|
||||
github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
|
||||
@@ -47,6 +47,7 @@ var expectedToolSources = []string{
|
||||
"mysql",
|
||||
"neo4j",
|
||||
"oceanbase",
|
||||
"oracledb",
|
||||
"postgres",
|
||||
"serverless-spark",
|
||||
"singlestore",
|
||||
@@ -131,6 +132,8 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
neo4jconfig := getOrFatal(t, "neo4j")
|
||||
healthcare_config := getOrFatal(t, "cloud-healthcare")
|
||||
snowflake_config := getOrFatal(t, "snowflake")
|
||||
oracle_config := getOrFatal(t,"oracledb")
|
||||
|
||||
if len(alloydb_omni_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb omni prebuilt tools yaml")
|
||||
}
|
||||
@@ -230,6 +233,10 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(snowflake_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch snowflake prebuilt tools yaml")
|
||||
}
|
||||
|
||||
if len(oracle_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch oracle prebuilt tools yaml")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailGetPrebuiltTool(t *testing.T) {
|
||||
|
||||
@@ -959,6 +959,21 @@ tools:
|
||||
Output:
|
||||
A confirmation message upon successful file deletion.
|
||||
|
||||
validate_project:
|
||||
kind: looker-validate-project
|
||||
source: looker-source
|
||||
description: |
|
||||
This tool checks a LookML project for syntax errors.
|
||||
|
||||
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
|
||||
|
||||
Parameters:
|
||||
- project_id (required): The unique ID of the LookML project.
|
||||
|
||||
Output:
|
||||
A list of error details including the file path and line number, and also a list of models
|
||||
that are not currently valid due to LookML errors.
|
||||
|
||||
get_connections:
|
||||
kind: looker-get-connections
|
||||
source: looker-source
|
||||
@@ -1072,6 +1087,7 @@ toolsets:
|
||||
- create_project_file
|
||||
- update_project_file
|
||||
- delete_project_file
|
||||
- validate_project
|
||||
- get_connections
|
||||
- get_connection_schemas
|
||||
- get_connection_databases
|
||||
|
||||
121
internal/prebuiltconfigs/tools/oracledb.yaml
Normal file
121
internal/prebuiltconfigs/tools/oracledb.yaml
Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright 2026 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
sources:
|
||||
oracle-source:
|
||||
kind: "oracle"
|
||||
connectionString: ${ORACLE_CONNECTION_STRING}
|
||||
walletLocation: ${ORACLE_WALLET:}
|
||||
user: ${ORACLE_USER}
|
||||
password: ${ORACLE_PASSWORD}
|
||||
useOCI: ${ORACLE_USE_OCI:false}
|
||||
|
||||
tools:
|
||||
|
||||
list_tables:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "Lists all user tables in the connected schema, including segment size, row count, and last analyzed date. Filters by a comma-separated list of names. If names are omitted, lists all tables in the current user's schema."
|
||||
statement: SELECT table_name from user_tables;
|
||||
|
||||
list_active_sessions:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "List the top N (default 50) currently running database sessions (STATUS='ACTIVE'), showing SID, OS User, Program, and the current SQL statement text."
|
||||
statement: SELECT
|
||||
s.sid,
|
||||
s.serial#,
|
||||
s.username,
|
||||
s.osuser,
|
||||
s.program,
|
||||
s.status,
|
||||
s.wait_class,
|
||||
s.event,
|
||||
sql.sql_text
|
||||
FROM
|
||||
v$session s,
|
||||
v$sql sql
|
||||
WHERE
|
||||
s.status = 'ACTIVE'
|
||||
AND s.sql_id = sql.sql_id (+)
|
||||
AND s.audsid != userenv('sessionid') -- Exclude current session
|
||||
ORDER BY s.last_call_et DESC
|
||||
FETCH FIRST COALESCE(10) ROWS ONLY;
|
||||
|
||||
get_query_plan:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "Generate a full execution plan for a single SQL statement. This can be used to analyze query performance without execution. Requires the SQL statement as input. following is an example EXPLAIN PLAN FOR {{&query}};"
|
||||
statement: SELECT PLAN_TABLE_OUTPUT FROM TABLE(DBMS_XPLAN.DISPLAY());
|
||||
|
||||
list_top_sql_by_resource:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "List the top N SQL statements from the library cache based on a chosen resource metric (CPU, I/O, or Elapsed Time), following is an example of the sql"
|
||||
statement: SELECT
|
||||
sql_id,
|
||||
executions,
|
||||
buffer_gets,
|
||||
disk_reads,
|
||||
cpu_time / 1000000 AS cpu_seconds,
|
||||
elapsed_time / 1000000 AS elapsed_seconds
|
||||
FROM
|
||||
v$sql
|
||||
FETCH FIRST 5 ROWS ONLY;
|
||||
|
||||
list_tablespace_usage:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "List tablespace names, total size, free space, and used percentage to monitor storage utilization."
|
||||
statement: SELECT
|
||||
t.tablespace_name,
|
||||
TO_CHAR(t.total_bytes / 1024 / 1024, '99,999.00') AS total_mb,
|
||||
TO_CHAR(SUM(d.bytes) / 1024 / 1024, '99,999.00') AS free_mb,
|
||||
TO_CHAR((t.total_bytes - SUM(d.bytes)) / t.total_bytes * 100, '99.00') AS used_pct
|
||||
FROM
|
||||
(SELECT tablespace_name, SUM(bytes) AS total_bytes FROM dba_data_files GROUP BY tablespace_name) t,
|
||||
dba_free_space d
|
||||
WHERE
|
||||
t.tablespace_name = d.tablespace_name (+)
|
||||
GROUP BY
|
||||
t.tablespace_name, t.total_bytes
|
||||
ORDER BY
|
||||
used_pct DESC;
|
||||
|
||||
list_invalid_objects:
|
||||
kind: oracle-sql
|
||||
source: oracle-source
|
||||
description: "Lists all database objects that are in an invalid state, requiring recompilation (e.g., procedures, functions, views)."
|
||||
statement: SELECT
|
||||
owner,
|
||||
object_type,
|
||||
object_name,
|
||||
status
|
||||
FROM
|
||||
dba_objects
|
||||
WHERE
|
||||
status = 'INVALID'
|
||||
AND owner NOT IN ('SYS', 'SYSTEM') -- Exclude system schemas for clarity
|
||||
ORDER BY
|
||||
owner, object_type, object_name;
|
||||
|
||||
toolsets:
|
||||
oracle_database_tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
- list_active_sessions
|
||||
- get_query_plan
|
||||
- list_top_sql_by_resource
|
||||
- list_tablespace_usage
|
||||
- list_invalid_objects
|
||||
@@ -19,7 +19,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
@@ -216,7 +215,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("tool invocation not authorized. Please make sure your specify correct auth headers")
|
||||
err = fmt.Errorf("tool invocation not authorized. Please make sure you specify correct auth headers")
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusUnauthorized))
|
||||
return
|
||||
@@ -234,15 +233,28 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||
if err != nil {
|
||||
// If auth error, return 401
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("error parsing authenticated parameters from ID token: %s", err))
|
||||
var clientServerErr *util.ClientServerError
|
||||
|
||||
// Return 401 Authentication errors
|
||||
if errors.As(err, &clientServerErr) && clientServerErr.Code == http.StatusUnauthorized {
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("auth error: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusUnauthorized))
|
||||
return
|
||||
}
|
||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest))
|
||||
|
||||
var agentErr *util.AgentError
|
||||
if errors.As(err, &agentErr) {
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("agent validation error: %v", err))
|
||||
errMap := map[string]string{"error": err.Error()}
|
||||
errMarshal, _ := json.Marshal(errMap)
|
||||
|
||||
_ = render.Render(w, r, &resultResponse{Result: string(errMarshal)})
|
||||
return
|
||||
}
|
||||
|
||||
// Return 500 if it's a specific ClientServerError that isn't a 401, or any other unexpected error
|
||||
s.logger.ErrorContext(ctx, fmt.Sprintf("internal server error: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusInternalServerError))
|
||||
return
|
||||
}
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
@@ -259,34 +271,50 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
// Determine what error to return to the users.
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
var statusCode int
|
||||
var tbErr util.ToolboxError
|
||||
|
||||
// Upstream API auth error propagation
|
||||
switch {
|
||||
case strings.Contains(errStr, "Error 401"):
|
||||
statusCode = http.StatusUnauthorized
|
||||
case strings.Contains(errStr, "Error 403"):
|
||||
statusCode = http.StatusForbidden
|
||||
}
|
||||
if errors.As(err, &tbErr) {
|
||||
switch tbErr.Category() {
|
||||
case util.CategoryAgent:
|
||||
// Agent Errors -> 200 OK
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("Tool invocation agent error: %v", err))
|
||||
res = map[string]string{
|
||||
"error": err.Error(),
|
||||
}
|
||||
|
||||
if statusCode == http.StatusUnauthorized || statusCode == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
// Propagate the original 401/403 error.
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("error invoking tool. Client credentials lack authorization to the source: %v", err))
|
||||
case util.CategoryServer:
|
||||
// Server Errors -> Check the specific code inside
|
||||
var clientServerErr *util.ClientServerError
|
||||
statusCode := http.StatusInternalServerError // Default to 500
|
||||
|
||||
if errors.As(err, &clientServerErr) {
|
||||
if clientServerErr.Code != 0 {
|
||||
statusCode = clientServerErr.Code
|
||||
}
|
||||
}
|
||||
|
||||
// Process auth error
|
||||
if statusCode == http.StatusUnauthorized || statusCode == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
// Token error, pass through 401/403
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("Client credentials lack authorization: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, statusCode))
|
||||
return
|
||||
}
|
||||
// ADC/Config error, return 500
|
||||
statusCode = http.StatusInternalServerError
|
||||
}
|
||||
|
||||
s.logger.ErrorContext(ctx, fmt.Sprintf("Tool invocation server error: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, statusCode))
|
||||
return
|
||||
}
|
||||
// ADC lacking permission or credentials configuration error.
|
||||
internalErr := fmt.Errorf("unexpected auth error occured during Tool invocation: %w", err)
|
||||
s.logger.ErrorContext(ctx, internalErr.Error())
|
||||
_ = render.Render(w, r, newErrResponse(internalErr, http.StatusInternalServerError))
|
||||
} else {
|
||||
// Unknown error -> 500
|
||||
s.logger.ErrorContext(ctx, fmt.Sprintf("Tool invocation unknown error: %v", err))
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusInternalServerError))
|
||||
return
|
||||
}
|
||||
err = fmt.Errorf("error while invoking tool: %w", err)
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
_ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest))
|
||||
return
|
||||
}
|
||||
|
||||
resMarshal, err := json.Marshal(res)
|
||||
|
||||
@@ -24,7 +24,6 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
@@ -41,140 +40,6 @@ var (
|
||||
_ prompts.Prompt = MockPrompt{}
|
||||
)
|
||||
|
||||
// MockTool is used to mock tools in tests
|
||||
type MockTool struct {
|
||||
Name string
|
||||
Description string
|
||||
Params []parameters.Parameter
|
||||
manifest tools.Manifest
|
||||
unauthorized bool
|
||||
requiresClientAuthrorization bool
|
||||
}
|
||||
|
||||
func (t MockTool) Invoke(context.Context, tools.SourceProvider, parameters.ParamValues, tools.AccessToken) (any, error) {
|
||||
mock := []any{t.Name}
|
||||
return mock, nil
|
||||
}
|
||||
|
||||
func (t MockTool) ToConfig() tools.ToolConfig {
|
||||
return nil
|
||||
}
|
||||
|
||||
// claims is a map of user info decoded from an auth token
|
||||
func (t MockTool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) {
|
||||
return parameters.ParseParams(t.Params, data, claimsMap)
|
||||
}
|
||||
|
||||
func (t MockTool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.Params, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
func (t MockTool) Manifest() tools.Manifest {
|
||||
pMs := make([]parameters.ParameterManifest, 0, len(t.Params))
|
||||
for _, p := range t.Params {
|
||||
pMs = append(pMs, p.Manifest())
|
||||
}
|
||||
return tools.Manifest{Description: t.Description, Parameters: pMs}
|
||||
}
|
||||
|
||||
func (t MockTool) Authorized(verifiedAuthServices []string) bool {
|
||||
// defaulted to true
|
||||
return !t.unauthorized
|
||||
}
|
||||
|
||||
func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error) {
|
||||
// defaulted to false
|
||||
return t.requiresClientAuthrorization, nil
|
||||
}
|
||||
|
||||
func (t MockTool) GetParameters() parameters.Parameters {
|
||||
return t.Params
|
||||
}
|
||||
|
||||
func (t MockTool) McpManifest() tools.McpManifest {
|
||||
properties := make(map[string]parameters.ParameterMcpManifest)
|
||||
required := make([]string, 0)
|
||||
authParams := make(map[string][]string)
|
||||
|
||||
for _, p := range t.Params {
|
||||
name := p.GetName()
|
||||
paramManifest, authParamList := p.McpManifest()
|
||||
properties[name] = paramManifest
|
||||
required = append(required, name)
|
||||
|
||||
if len(authParamList) > 0 {
|
||||
authParams[name] = authParamList
|
||||
}
|
||||
}
|
||||
|
||||
toolsSchema := parameters.McpToolsSchema{
|
||||
Type: "object",
|
||||
Properties: properties,
|
||||
Required: required,
|
||||
}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: t.Name,
|
||||
Description: t.Description,
|
||||
InputSchema: toolsSchema,
|
||||
}
|
||||
|
||||
if len(authParams) > 0 {
|
||||
mcpManifest.Metadata = map[string]any{
|
||||
"toolbox/authParams": authParams,
|
||||
}
|
||||
}
|
||||
|
||||
return mcpManifest
|
||||
}
|
||||
|
||||
func (t MockTool) GetAuthTokenHeaderName(tools.SourceProvider) (string, error) {
|
||||
return "Authorization", nil
|
||||
}
|
||||
|
||||
// MockPrompt is used to mock prompts in tests
|
||||
type MockPrompt struct {
|
||||
Name string
|
||||
Description string
|
||||
Args prompts.Arguments
|
||||
}
|
||||
|
||||
func (p MockPrompt) SubstituteParams(vals parameters.ParamValues) (any, error) {
|
||||
return []prompts.Message{
|
||||
{
|
||||
Role: "user",
|
||||
Content: fmt.Sprintf("substituted %s", p.Name),
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p MockPrompt) ParseArgs(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) {
|
||||
var params parameters.Parameters
|
||||
for _, arg := range p.Args {
|
||||
params = append(params, arg.Parameter)
|
||||
}
|
||||
return parameters.ParseParams(params, data, claimsMap)
|
||||
}
|
||||
|
||||
func (p MockPrompt) Manifest() prompts.Manifest {
|
||||
var argManifests []parameters.ParameterManifest
|
||||
for _, arg := range p.Args {
|
||||
argManifests = append(argManifests, arg.Manifest())
|
||||
}
|
||||
return prompts.Manifest{
|
||||
Description: p.Description,
|
||||
Arguments: argManifests,
|
||||
}
|
||||
}
|
||||
|
||||
func (p MockPrompt) McpManifest() prompts.McpManifest {
|
||||
return prompts.GetMcpManifest(p.Name, p.Description, p.Args)
|
||||
}
|
||||
|
||||
func (p MockPrompt) ToConfig() prompts.PromptConfig {
|
||||
return nil
|
||||
}
|
||||
|
||||
var tool1 = MockTool{
|
||||
Name: "no_params",
|
||||
Params: []parameters.Parameter{},
|
||||
|
||||
@@ -23,7 +23,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
@@ -37,9 +36,11 @@ import (
|
||||
v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
|
||||
v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"go.opentelemetry.io/otel"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
type sseSession struct {
|
||||
@@ -117,6 +118,55 @@ type stdioSession struct {
|
||||
writer io.Writer
|
||||
}
|
||||
|
||||
// traceContextCarrier implements propagation.TextMapCarrier for extracting trace context from _meta
|
||||
type traceContextCarrier map[string]string
|
||||
|
||||
func (c traceContextCarrier) Get(key string) string {
|
||||
return c[key]
|
||||
}
|
||||
|
||||
func (c traceContextCarrier) Set(key, value string) {
|
||||
c[key] = value
|
||||
}
|
||||
|
||||
func (c traceContextCarrier) Keys() []string {
|
||||
keys := make([]string, 0, len(c))
|
||||
for k := range c {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// extractTraceContext extracts W3C Trace Context from params._meta
|
||||
func extractTraceContext(ctx context.Context, body []byte) context.Context {
|
||||
// Try to parse the request to extract _meta
|
||||
var req struct {
|
||||
Params struct {
|
||||
Meta struct {
|
||||
Traceparent string `json:"traceparent,omitempty"`
|
||||
Tracestate string `json:"tracestate,omitempty"`
|
||||
} `json:"_meta,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
return ctx
|
||||
}
|
||||
|
||||
// If traceparent is present, extract the context
|
||||
if req.Params.Meta.Traceparent != "" {
|
||||
carrier := traceContextCarrier{
|
||||
"traceparent": req.Params.Meta.Traceparent,
|
||||
}
|
||||
if req.Params.Meta.Tracestate != "" {
|
||||
carrier["tracestate"] = req.Params.Meta.Tracestate
|
||||
}
|
||||
return otel.GetTextMapPropagator().Extract(ctx, carrier)
|
||||
}
|
||||
|
||||
return ctx
|
||||
}
|
||||
|
||||
func NewStdioSession(s *Server, stdin io.Reader, stdout io.Writer) *stdioSession {
|
||||
stdioSession := &stdioSession{
|
||||
server: s,
|
||||
@@ -143,18 +193,29 @@ func (s *stdioSession) readInputStream(ctx context.Context) error {
|
||||
}
|
||||
return err
|
||||
}
|
||||
v, res, err := processMcpMessage(ctx, []byte(line), s.server, s.protocol, "", "", nil)
|
||||
// This ensures the transport span becomes a child of the client span
|
||||
msgCtx := extractTraceContext(ctx, []byte(line))
|
||||
|
||||
// Create span for STDIO transport
|
||||
msgCtx, span := s.server.instrumentation.Tracer.Start(msgCtx, "toolbox/server/mcp/stdio",
|
||||
trace.WithSpanKind(trace.SpanKindServer),
|
||||
)
|
||||
defer span.End()
|
||||
|
||||
v, res, err := processMcpMessage(msgCtx, []byte(line), s.server, s.protocol, "", "", nil, "")
|
||||
if err != nil {
|
||||
// errors during the processing of message will generate a valid MCP Error response.
|
||||
// server can continue to run.
|
||||
s.server.logger.ErrorContext(ctx, err.Error())
|
||||
s.server.logger.ErrorContext(msgCtx, err.Error())
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
}
|
||||
|
||||
if v != "" {
|
||||
s.protocol = v
|
||||
}
|
||||
// no responses for notifications
|
||||
if res != nil {
|
||||
if err = s.write(ctx, res); err != nil {
|
||||
if err = s.write(msgCtx, res); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@@ -240,7 +301,9 @@ func mcpRouter(s *Server) (chi.Router, error) {
|
||||
|
||||
// sseHandler handles sse initialization and message.
|
||||
func sseHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp/sse")
|
||||
ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp/sse",
|
||||
trace.WithSpanKind(trace.SpanKindServer),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
|
||||
sessionId := uuid.New().String()
|
||||
@@ -336,9 +399,27 @@ func methodNotAllowed(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/mcp")
|
||||
ctx := r.Context()
|
||||
ctx = util.WithLogger(ctx, s.logger)
|
||||
|
||||
// Read body first so we can extract trace context
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
// Generate a new uuid if unable to decode
|
||||
id := uuid.New().String()
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
render.JSON(w, r, jsonrpc.NewError(id, jsonrpc.PARSE_ERROR, err.Error(), nil))
|
||||
return
|
||||
}
|
||||
|
||||
// This ensures the transport span becomes a child of the client span
|
||||
ctx = extractTraceContext(ctx, body)
|
||||
|
||||
// Create span for HTTP transport
|
||||
ctx, span := s.instrumentation.Tracer.Start(ctx, "toolbox/server/mcp/http",
|
||||
trace.WithSpanKind(trace.SpanKindServer),
|
||||
)
|
||||
r = r.WithContext(ctx)
|
||||
ctx = util.WithLogger(r.Context(), s.logger)
|
||||
|
||||
var sessionId, protocolVersion string
|
||||
var session *sseSession
|
||||
@@ -380,7 +461,6 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
s.logger.DebugContext(ctx, fmt.Sprintf("toolset name: %s", toolsetName))
|
||||
span.SetAttributes(attribute.String("toolset_name", toolsetName))
|
||||
|
||||
var err error
|
||||
defer func() {
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
@@ -399,17 +479,9 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
)
|
||||
}()
|
||||
|
||||
// Read and returns a body from io.Reader
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
// Generate a new uuid if unable to decode
|
||||
id := uuid.New().String()
|
||||
s.logger.DebugContext(ctx, err.Error())
|
||||
render.JSON(w, r, jsonrpc.NewError(id, jsonrpc.PARSE_ERROR, err.Error(), nil))
|
||||
return
|
||||
}
|
||||
networkProtocolVersion := fmt.Sprintf("%d.%d", r.ProtoMajor, r.ProtoMinor)
|
||||
|
||||
v, res, err := processMcpMessage(ctx, body, s, protocolVersion, toolsetName, promptsetName, r.Header)
|
||||
v, res, err := processMcpMessage(ctx, body, s, protocolVersion, toolsetName, promptsetName, r.Header, networkProtocolVersion)
|
||||
if err != nil {
|
||||
s.logger.DebugContext(ctx, fmt.Errorf("error processing message: %w", err).Error())
|
||||
}
|
||||
@@ -444,15 +516,12 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
code := rpcResponse.Error.Code
|
||||
switch code {
|
||||
case jsonrpc.INTERNAL_ERROR:
|
||||
// Map Internal RPC Error (-32603) to HTTP 500
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
case jsonrpc.INVALID_REQUEST:
|
||||
errStr := err.Error()
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
} else if strings.Contains(errStr, "Error 401") {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
} else if strings.Contains(errStr, "Error 403") {
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
var clientServerErr *util.ClientServerError
|
||||
if errors.As(err, &clientServerErr) {
|
||||
w.WriteHeader(clientServerErr.Code)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -462,7 +531,7 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
// processMcpMessage process the messages received from clients
|
||||
func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVersion string, toolsetName string, promptsetName string, header http.Header) (string, any, error) {
|
||||
func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVersion string, toolsetName string, promptsetName string, header http.Header, networkProtocolVersion string) (string, any, error) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return "", jsonrpc.NewError("", jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
@@ -498,31 +567,95 @@ func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVers
|
||||
return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// Create method-specific span with semantic conventions
|
||||
// Note: Trace context is already extracted and set in ctx by the caller
|
||||
ctx, span := s.instrumentation.Tracer.Start(ctx, baseMessage.Method,
|
||||
trace.WithSpanKind(trace.SpanKindServer),
|
||||
)
|
||||
defer span.End()
|
||||
|
||||
// Determine network transport and protocol based on header presence
|
||||
networkTransport := "pipe" // default for stdio
|
||||
networkProtocolName := "stdio"
|
||||
if header != nil {
|
||||
networkTransport = "tcp" // HTTP/SSE transport
|
||||
networkProtocolName = "http"
|
||||
}
|
||||
|
||||
// Set required semantic attributes for span according to OTEL MCP semcov
|
||||
// ref: https://opentelemetry.io/docs/specs/semconv/gen-ai/mcp/#server
|
||||
span.SetAttributes(
|
||||
attribute.String("mcp.method.name", baseMessage.Method),
|
||||
attribute.String("network.transport", networkTransport),
|
||||
attribute.String("network.protocol.name", networkProtocolName),
|
||||
)
|
||||
|
||||
// Set network protocol version if available
|
||||
if networkProtocolVersion != "" {
|
||||
span.SetAttributes(attribute.String("network.protocol.version", networkProtocolVersion))
|
||||
}
|
||||
|
||||
// Set MCP protocol version if available
|
||||
if protocolVersion != "" {
|
||||
span.SetAttributes(attribute.String("mcp.protocol.version", protocolVersion))
|
||||
}
|
||||
|
||||
// Set request ID
|
||||
if baseMessage.Id != nil {
|
||||
span.SetAttributes(attribute.String("jsonrpc.request.id", fmt.Sprintf("%v", baseMessage.Id)))
|
||||
}
|
||||
|
||||
// Set toolset name
|
||||
span.SetAttributes(attribute.String("toolset.name", toolsetName))
|
||||
|
||||
// Check if message is a notification
|
||||
if baseMessage.Id == nil {
|
||||
err := mcp.NotificationHandler(ctx, body)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
}
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
// Process the method
|
||||
switch baseMessage.Method {
|
||||
case mcputil.INITIALIZE:
|
||||
res, v, err := mcp.InitializeResponse(ctx, baseMessage.Id, body, s.version)
|
||||
result, version, err := mcp.InitializeResponse(ctx, baseMessage.Id, body, s.version)
|
||||
if err != nil {
|
||||
return "", res, err
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
if rpcErr, ok := result.(jsonrpc.JSONRPCError); ok {
|
||||
span.SetAttributes(attribute.String("error.type", rpcErr.Error.String()))
|
||||
}
|
||||
return "", result, err
|
||||
}
|
||||
return v, res, err
|
||||
span.SetAttributes(attribute.String("mcp.protocol.version", version))
|
||||
return version, result, err
|
||||
default:
|
||||
toolset, ok := s.ResourceMgr.GetToolset(toolsetName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("toolset does not exist")
|
||||
return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
err := fmt.Errorf("toolset does not exist")
|
||||
rpcErr := jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil)
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
span.SetAttributes(attribute.String("error.type", rpcErr.Error.String()))
|
||||
return "", rpcErr, err
|
||||
}
|
||||
promptset, ok := s.ResourceMgr.GetPromptset(promptsetName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("promptset does not exist")
|
||||
return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
err := fmt.Errorf("promptset does not exist")
|
||||
rpcErr := jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil)
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
span.SetAttributes(attribute.String("error.type", rpcErr.Error.String()))
|
||||
return "", rpcErr, err
|
||||
}
|
||||
res, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, promptset, s.ResourceMgr, body, header)
|
||||
return "", res, err
|
||||
result, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, promptset, s.ResourceMgr, body, header)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, err.Error())
|
||||
// Set error.type based on JSON-RPC error code
|
||||
if rpcErr, ok := result.(jsonrpc.JSONRPCError); ok {
|
||||
span.SetAttributes(attribute.Int("jsonrpc.error.code", rpcErr.Error.Code))
|
||||
span.SetAttributes(attribute.String("error.type", rpcErr.Error.String()))
|
||||
}
|
||||
}
|
||||
return "", result, err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,6 +45,9 @@ type Request struct {
|
||||
// notifications. The receiver is not obligated to provide these
|
||||
// notifications.
|
||||
ProgressToken ProgressToken `json:"progressToken,omitempty"`
|
||||
// W3C Trace Context fields for distributed tracing
|
||||
Traceparent string `json:"traceparent,omitempty"`
|
||||
Tracestate string `json:"tracestate,omitempty"`
|
||||
} `json:"_meta,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
@@ -97,6 +100,24 @@ type Error struct {
|
||||
Data interface{} `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
// String returns the error type as a string based on the error code.
|
||||
func (e Error) String() string {
|
||||
switch e.Code {
|
||||
case METHOD_NOT_FOUND:
|
||||
return "method_not_found"
|
||||
case INVALID_PARAMS:
|
||||
return "invalid_params"
|
||||
case INTERNAL_ERROR:
|
||||
return "internal_error"
|
||||
case PARSE_ERROR:
|
||||
return "parse_error"
|
||||
case INVALID_REQUEST:
|
||||
return "invalid_request"
|
||||
default:
|
||||
return "jsonrpc_error"
|
||||
}
|
||||
}
|
||||
|
||||
// JSONRPCError represents a non-successful (error) response to a request.
|
||||
type JSONRPCError struct {
|
||||
Jsonrpc string `json:"jsonrpc"`
|
||||
|
||||
@@ -21,7 +21,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
@@ -29,6 +28,8 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
@@ -102,6 +103,14 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName))
|
||||
span.SetAttributes(
|
||||
attribute.String("gen_ai.tool.name", toolName),
|
||||
attribute.String("gen_ai.operation.name", "execute_tool"),
|
||||
)
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
@@ -124,7 +133,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
if clientAuth {
|
||||
if accessToken == "" {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||
err := util.NewClientServerError(
|
||||
"missing access token in the 'Authorization' header",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -172,7 +186,11 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||
err = util.NewClientServerError(
|
||||
"unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "tool invocation authorized")
|
||||
@@ -194,30 +212,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
// Missing authService tokens.
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Upstream auth error
|
||||
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||
if clientAuth {
|
||||
// Error with client credentials should pass down to the client
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
var tbErr util.ToolboxError
|
||||
|
||||
if errors.As(err, &tbErr) {
|
||||
switch tbErr.Category() {
|
||||
case util.CategoryAgent:
|
||||
// MCP - Tool execution error
|
||||
// Return SUCCESS but with IsError: true
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
|
||||
case util.CategoryServer:
|
||||
// MCP Spec - Protocol error
|
||||
// Return JSON-RPC ERROR
|
||||
var clientServerErr *util.ClientServerError
|
||||
rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603)
|
||||
|
||||
if errors.As(err, &clientServerErr) {
|
||||
if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
rpcCode = jsonrpc.INVALID_REQUEST
|
||||
} else {
|
||||
rpcCode = jsonrpc.INTERNAL_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err
|
||||
}
|
||||
// Auth error with ADC should raise internal 500 error
|
||||
} else {
|
||||
// Unknown error -> 500
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
@@ -288,6 +320,11 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r
|
||||
|
||||
promptName := req.Params.Name
|
||||
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName))
|
||||
span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName))
|
||||
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||
if !ok {
|
||||
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||
|
||||
@@ -21,7 +21,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
@@ -29,6 +28,8 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
@@ -102,6 +103,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName))
|
||||
span.SetAttributes(
|
||||
attribute.String("gen_ai.tool.name", toolName),
|
||||
attribute.String("gen_ai.operation.name", "execute_tool"),
|
||||
)
|
||||
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
@@ -124,7 +134,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
if clientAuth {
|
||||
if accessToken == "" {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||
err := util.NewClientServerError(
|
||||
"missing access token in the 'Authorization' header",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -172,7 +187,11 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||
err = util.NewClientServerError(
|
||||
"unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "tool invocation authorized")
|
||||
@@ -194,31 +213,45 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
// Missing authService tokens.
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Upstream auth error
|
||||
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||
if clientAuth {
|
||||
// Error with client credentials should pass down to the client
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
var tbErr util.ToolboxError
|
||||
|
||||
if errors.As(err, &tbErr) {
|
||||
switch tbErr.Category() {
|
||||
case util.CategoryAgent:
|
||||
// MCP - Tool execution error
|
||||
// Return SUCCESS but with IsError: true
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
|
||||
case util.CategoryServer:
|
||||
// MCP Spec - Protocol error
|
||||
// Return JSON-RPC ERROR
|
||||
var clientServerErr *util.ClientServerError
|
||||
rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603)
|
||||
|
||||
if errors.As(err, &clientServerErr) {
|
||||
if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
rpcCode = jsonrpc.INVALID_REQUEST
|
||||
} else {
|
||||
rpcCode = jsonrpc.INTERNAL_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err
|
||||
}
|
||||
// Auth error with ADC should raise internal 500 error
|
||||
} else {
|
||||
// Unknown error -> 500
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
|
||||
sliceRes, ok := results.([]any)
|
||||
@@ -287,6 +320,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r
|
||||
|
||||
promptName := req.Params.Name
|
||||
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName))
|
||||
span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName))
|
||||
|
||||
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||
if !ok {
|
||||
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||
|
||||
@@ -21,7 +21,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
@@ -29,6 +28,8 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
@@ -95,6 +96,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName))
|
||||
span.SetAttributes(
|
||||
attribute.String("gen_ai.tool.name", toolName),
|
||||
attribute.String("gen_ai.operation.name", "execute_tool"),
|
||||
)
|
||||
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
@@ -117,7 +127,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
if clientAuth {
|
||||
if accessToken == "" {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||
err := util.NewClientServerError(
|
||||
"missing access token in the 'Authorization' header",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,7 +180,11 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||
err = util.NewClientServerError(
|
||||
"unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "tool invocation authorized")
|
||||
@@ -187,29 +206,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
// Missing authService tokens.
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Upstream auth error
|
||||
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||
if clientAuth {
|
||||
// Error with client credentials should pass down to the client
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
var tbErr util.ToolboxError
|
||||
|
||||
if errors.As(err, &tbErr) {
|
||||
switch tbErr.Category() {
|
||||
case util.CategoryAgent:
|
||||
// MCP - Tool execution error
|
||||
// Return SUCCESS but with IsError: true
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
|
||||
case util.CategoryServer:
|
||||
// MCP Spec - Protocol error
|
||||
// Return JSON-RPC ERROR
|
||||
var clientServerErr *util.ClientServerError
|
||||
rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603)
|
||||
|
||||
if errors.As(err, &clientServerErr) {
|
||||
if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
rpcCode = jsonrpc.INVALID_REQUEST
|
||||
} else {
|
||||
rpcCode = jsonrpc.INTERNAL_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err
|
||||
}
|
||||
// Auth error with ADC should raise internal 500 error
|
||||
} else {
|
||||
// Unknown error -> 500
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
@@ -280,6 +314,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r
|
||||
|
||||
promptName := req.Params.Name
|
||||
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName))
|
||||
span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName))
|
||||
|
||||
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||
if !ok {
|
||||
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||
|
||||
@@ -21,7 +21,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
@@ -29,6 +28,8 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
@@ -95,6 +96,15 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", TOOLS_CALL, toolName))
|
||||
span.SetAttributes(
|
||||
attribute.String("gen_ai.tool.name", toolName),
|
||||
attribute.String("gen_ai.operation.name", "execute_tool"),
|
||||
)
|
||||
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
@@ -117,7 +127,12 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
if clientAuth {
|
||||
if accessToken == "" {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||
err := util.NewClientServerError(
|
||||
"missing access token in the 'Authorization' header",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,7 +180,11 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||
err = util.NewClientServerError(
|
||||
"unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
http.StatusUnauthorized,
|
||||
nil,
|
||||
)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "tool invocation authorized")
|
||||
@@ -187,29 +206,44 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
// Missing authService tokens.
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Upstream auth error
|
||||
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||
if clientAuth {
|
||||
// Error with client credentials should pass down to the client
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
var tbErr util.ToolboxError
|
||||
|
||||
if errors.As(err, &tbErr) {
|
||||
switch tbErr.Category() {
|
||||
case util.CategoryAgent:
|
||||
// MCP - Tool execution error
|
||||
// Return SUCCESS but with IsError: true
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
|
||||
case util.CategoryServer:
|
||||
// MCP Spec - Protocol error
|
||||
// Return JSON-RPC ERROR
|
||||
var clientServerErr *util.ClientServerError
|
||||
rpcCode := jsonrpc.INTERNAL_ERROR // Default to Internal Error (-32603)
|
||||
|
||||
if errors.As(err, &clientServerErr) {
|
||||
if clientServerErr.Code == http.StatusUnauthorized || clientServerErr.Code == http.StatusForbidden {
|
||||
if clientAuth {
|
||||
rpcCode = jsonrpc.INVALID_REQUEST
|
||||
} else {
|
||||
rpcCode = jsonrpc.INTERNAL_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonrpc.NewError(id, rpcCode, err.Error(), nil), err
|
||||
}
|
||||
// Auth error with ADC should raise internal 500 error
|
||||
} else {
|
||||
// Unknown error -> 500
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
@@ -280,6 +314,12 @@ func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *r
|
||||
|
||||
promptName := req.Params.Name
|
||||
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||
|
||||
// Update span name and set gen_ai attributes
|
||||
span := trace.SpanFromContext(ctx)
|
||||
span.SetName(fmt.Sprintf("%s %s", PROMPTS_GET, promptName))
|
||||
span.SetAttributes(attribute.String("gen_ai.prompt.name", promptName))
|
||||
|
||||
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||
if !ok {
|
||||
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||
|
||||
@@ -231,7 +231,7 @@ func TestMcpEndpointWithoutInitialized(t *testing.T) {
|
||||
"id": "tools-call-tool4",
|
||||
"error": map[string]any{
|
||||
"code": -32600.0,
|
||||
"message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized",
|
||||
"message": "unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -320,7 +320,7 @@ func TestMcpEndpointWithoutInitialized(t *testing.T) {
|
||||
Params: map[string]any{
|
||||
"name": "prompt2",
|
||||
"arguments": map[string]any{
|
||||
"arg1": 42, // prompt2 expects a string, we send a number
|
||||
"arg1": 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -834,7 +834,7 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
"id": "tools-call-tool4",
|
||||
"error": map[string]any{
|
||||
"code": -32600.0,
|
||||
"message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized",
|
||||
"message": "unauthorized Tool call: Please make sure you specify correct auth headers",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
160
internal/server/mocks.go
Normal file
160
internal/server/mocks.go
Normal file
@@ -0,0 +1,160 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package server
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
)
|
||||
|
||||
// MockTool is used to mock tools in tests
|
||||
type MockTool struct {
|
||||
Name string
|
||||
Description string
|
||||
Params []parameters.Parameter
|
||||
manifest tools.Manifest
|
||||
unauthorized bool
|
||||
requiresClientAuthrorization bool
|
||||
}
|
||||
|
||||
func (t MockTool) Invoke(context.Context, tools.SourceProvider, parameters.ParamValues, tools.AccessToken) (any, util.ToolboxError) {
|
||||
mock := []any{t.Name}
|
||||
return mock, nil
|
||||
}
|
||||
|
||||
func (t MockTool) ToConfig() tools.ToolConfig {
|
||||
return nil
|
||||
}
|
||||
|
||||
// claims is a map of user info decoded from an auth token
|
||||
func (t MockTool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) {
|
||||
return parameters.ParseParams(t.Params, data, claimsMap)
|
||||
}
|
||||
|
||||
func (t MockTool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.Params, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
func (t MockTool) Manifest() tools.Manifest {
|
||||
pMs := make([]parameters.ParameterManifest, 0, len(t.Params))
|
||||
for _, p := range t.Params {
|
||||
pMs = append(pMs, p.Manifest())
|
||||
}
|
||||
return tools.Manifest{Description: t.Description, Parameters: pMs}
|
||||
}
|
||||
|
||||
func (t MockTool) Authorized(verifiedAuthServices []string) bool {
|
||||
// defaulted to true
|
||||
return !t.unauthorized
|
||||
}
|
||||
|
||||
func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error) {
|
||||
// defaulted to false
|
||||
return t.requiresClientAuthrorization, nil
|
||||
}
|
||||
|
||||
func (t MockTool) GetParameters() parameters.Parameters {
|
||||
return t.Params
|
||||
}
|
||||
|
||||
func (t MockTool) McpManifest() tools.McpManifest {
|
||||
properties := make(map[string]parameters.ParameterMcpManifest)
|
||||
required := make([]string, 0)
|
||||
authParams := make(map[string][]string)
|
||||
|
||||
for _, p := range t.Params {
|
||||
name := p.GetName()
|
||||
paramManifest, authParamList := p.McpManifest()
|
||||
properties[name] = paramManifest
|
||||
required = append(required, name)
|
||||
|
||||
if len(authParamList) > 0 {
|
||||
authParams[name] = authParamList
|
||||
}
|
||||
}
|
||||
|
||||
toolsSchema := parameters.McpToolsSchema{
|
||||
Type: "object",
|
||||
Properties: properties,
|
||||
Required: required,
|
||||
}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: t.Name,
|
||||
Description: t.Description,
|
||||
InputSchema: toolsSchema,
|
||||
}
|
||||
|
||||
if len(authParams) > 0 {
|
||||
mcpManifest.Metadata = map[string]any{
|
||||
"toolbox/authParams": authParams,
|
||||
}
|
||||
}
|
||||
|
||||
return mcpManifest
|
||||
}
|
||||
|
||||
func (t MockTool) GetAuthTokenHeaderName(tools.SourceProvider) (string, error) {
|
||||
return "Authorization", nil
|
||||
}
|
||||
|
||||
// MockPrompt is used to mock prompts in tests
|
||||
type MockPrompt struct {
|
||||
Name string
|
||||
Description string
|
||||
Args prompts.Arguments
|
||||
}
|
||||
|
||||
func (p MockPrompt) SubstituteParams(vals parameters.ParamValues) (any, error) {
|
||||
return []prompts.Message{
|
||||
{
|
||||
Role: "user",
|
||||
Content: fmt.Sprintf("substituted %s", p.Name),
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p MockPrompt) ParseArgs(data map[string]any, claimsMap map[string]map[string]any) (parameters.ParamValues, error) {
|
||||
var params parameters.Parameters
|
||||
for _, arg := range p.Args {
|
||||
params = append(params, arg.Parameter)
|
||||
}
|
||||
return parameters.ParseParams(params, data, claimsMap)
|
||||
}
|
||||
|
||||
func (p MockPrompt) Manifest() prompts.Manifest {
|
||||
var argManifests []parameters.ParameterManifest
|
||||
for _, arg := range p.Args {
|
||||
argManifests = append(argManifests, arg.Manifest())
|
||||
}
|
||||
return prompts.Manifest{
|
||||
Description: p.Description,
|
||||
Arguments: argManifests,
|
||||
}
|
||||
}
|
||||
|
||||
func (p MockPrompt) McpManifest() prompts.McpManifest {
|
||||
return prompts.GetMcpManifest(p.Name, p.Description, p.Args)
|
||||
}
|
||||
|
||||
func (p MockPrompt) ToConfig() prompts.PromptConfig {
|
||||
return nil
|
||||
}
|
||||
@@ -361,7 +361,11 @@ func (s *Source) GetOperations(ctx context.Context, project, location, operation
|
||||
}
|
||||
}
|
||||
|
||||
return string(opBytes), nil
|
||||
var result any
|
||||
if err := json.Unmarshal(opBytes, &result); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal operation bytes: %w", err)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("Operation not complete, retrying in %v\n", delay))
|
||||
}
|
||||
|
||||
421
internal/sources/cockroachdb/cockroachdb.go
Normal file
421
internal/sources/cockroachdb/cockroachdb.go
Normal file
@@ -0,0 +1,421 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cockroachdb
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"math"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
crdbpgx "github.com/cockroachdb/cockroach-go/v2/crdb/crdbpgxv5"
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
const SourceType string = "cockroachdb"
|
||||
|
||||
var _ sources.SourceConfig = Config{}
|
||||
|
||||
func init() {
|
||||
if !sources.Register(SourceType, newConfig) {
|
||||
panic(fmt.Sprintf("source type %q already registered", SourceType))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
|
||||
// MCP compliance: Read-only by default, require explicit opt-in for writes
|
||||
actual := Config{
|
||||
Name: name,
|
||||
MaxRetries: 5,
|
||||
RetryBaseDelay: "500ms",
|
||||
ReadOnlyMode: true, // MCP requirement: read-only by default
|
||||
EnableWriteMode: false, // Must be explicitly enabled
|
||||
MaxRowLimit: 1000, // MCP requirement: limit query results
|
||||
QueryTimeoutSec: 30, // MCP requirement: prevent long-running queries
|
||||
EnableTelemetry: true, // MCP requirement: observability
|
||||
TelemetryVerbose: false,
|
||||
}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Security validation: If EnableWriteMode is true, ReadOnlyMode should be false
|
||||
if actual.EnableWriteMode {
|
||||
actual.ReadOnlyMode = false
|
||||
}
|
||||
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Type string `yaml:"type" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user" validate:"required"`
|
||||
Password string `yaml:"password"`
|
||||
Database string `yaml:"database" validate:"required"`
|
||||
QueryParams map[string]string `yaml:"queryParams"`
|
||||
MaxRetries int `yaml:"maxRetries"`
|
||||
RetryBaseDelay string `yaml:"retryBaseDelay"`
|
||||
|
||||
// MCP Security Features
|
||||
ReadOnlyMode bool `yaml:"readOnlyMode"` // Default: true (enforced in Initialize)
|
||||
EnableWriteMode bool `yaml:"enableWriteMode"` // Explicit opt-in for write operations
|
||||
MaxRowLimit int `yaml:"maxRowLimit"` // Default: 1000
|
||||
QueryTimeoutSec int `yaml:"queryTimeoutSec"` // Default: 30
|
||||
|
||||
// Observability
|
||||
EnableTelemetry bool `yaml:"enableTelemetry"` // Default: true
|
||||
TelemetryVerbose bool `yaml:"telemetryVerbose"` // Default: false
|
||||
ClusterID string `yaml:"clusterID"` // Optional cluster identifier for telemetry
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigType() string {
|
||||
return SourceType
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
retryBaseDelay, err := time.ParseDuration(r.RetryBaseDelay)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid retryBaseDelay: %w", err)
|
||||
}
|
||||
|
||||
pool, err := initCockroachDBConnectionPoolWithRetry(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.QueryParams, r.MaxRetries, retryBaseDelay)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create pool: %w", err)
|
||||
}
|
||||
|
||||
s := &Source{
|
||||
Config: r,
|
||||
Pool: pool,
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
var _ sources.Source = &Source{}
|
||||
|
||||
type Source struct {
|
||||
Config
|
||||
Pool *pgxpool.Pool
|
||||
}
|
||||
|
||||
func (s *Source) SourceType() string {
|
||||
return SourceType
|
||||
}
|
||||
|
||||
func (s *Source) ToConfig() sources.SourceConfig {
|
||||
return s.Config
|
||||
}
|
||||
|
||||
func (s *Source) CockroachDBPool() *pgxpool.Pool {
|
||||
return s.Pool
|
||||
}
|
||||
|
||||
func (s *Source) PostgresPool() *pgxpool.Pool {
|
||||
return s.Pool
|
||||
}
|
||||
|
||||
// ExecuteTxWithRetry executes a function within a transaction with automatic retry logic
|
||||
// using the official CockroachDB retry mechanism from cockroach-go/v2
|
||||
func (s *Source) ExecuteTxWithRetry(ctx context.Context, fn func(pgx.Tx) error) error {
|
||||
return crdbpgx.ExecuteTx(ctx, s.Pool, pgx.TxOptions{}, fn)
|
||||
}
|
||||
|
||||
// Query executes a query using the connection pool with MCP security enforcement.
|
||||
// For read-only queries, connection-level retry is sufficient.
|
||||
// For write operations requiring transaction retry, use ExecuteTxWithRetry directly.
|
||||
// Note: Callers should manage context timeouts as needed.
|
||||
func (s *Source) Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error) {
|
||||
// MCP Security Check 1: Enforce write operation restrictions
|
||||
if err := s.CanExecuteWrite(sql); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// MCP Security Check 2: Apply query limits (row limit)
|
||||
modifiedSQL, err := s.ApplyQueryLimits(sql)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s.Pool.Query(ctx, modifiedSQL, args...)
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// MCP Security & Observability Features
|
||||
// ============================================================================
|
||||
|
||||
// TelemetryEvent represents a structured telemetry event for MCP tool calls
|
||||
type TelemetryEvent struct {
|
||||
Timestamp time.Time `json:"timestamp"`
|
||||
ToolName string `json:"tool_name"`
|
||||
ClusterID string `json:"cluster_id"`
|
||||
Database string `json:"database"`
|
||||
User string `json:"user"`
|
||||
SQLRedacted string `json:"sql_redacted"` // Query with values redacted
|
||||
Status string `json:"status"` // "success" | "failure"
|
||||
ErrorCode string `json:"error_code,omitempty"`
|
||||
ErrorMsg string `json:"error_msg,omitempty"`
|
||||
LatencyMs int64 `json:"latency_ms"`
|
||||
RowsAffected int64 `json:"rows_affected,omitempty"`
|
||||
Metadata map[string]string `json:"metadata,omitempty"`
|
||||
}
|
||||
|
||||
// StructuredError represents an MCP-compliant error with error codes
|
||||
type StructuredError struct {
|
||||
Code string `json:"error_code"`
|
||||
Message string `json:"message"`
|
||||
Details map[string]any `json:"details,omitempty"`
|
||||
}
|
||||
|
||||
func (e *StructuredError) Error() string {
|
||||
return fmt.Sprintf("[%s] %s", e.Code, e.Message)
|
||||
}
|
||||
|
||||
// MCP Error Codes
|
||||
const (
|
||||
ErrCodeUnauthorized = "CRDB_UNAUTHORIZED"
|
||||
ErrCodeReadOnlyViolation = "CRDB_READONLY_VIOLATION"
|
||||
ErrCodeQueryTimeout = "CRDB_QUERY_TIMEOUT"
|
||||
ErrCodeRowLimitExceeded = "CRDB_ROW_LIMIT_EXCEEDED"
|
||||
ErrCodeInvalidSQL = "CRDB_INVALID_SQL"
|
||||
ErrCodeConnectionFailed = "CRDB_CONNECTION_FAILED"
|
||||
ErrCodeWriteModeRequired = "CRDB_WRITE_MODE_REQUIRED"
|
||||
ErrCodeQueryExecutionFailed = "CRDB_QUERY_EXECUTION_FAILED"
|
||||
)
|
||||
|
||||
// SQLStatementType represents the type of SQL statement
|
||||
type SQLStatementType int
|
||||
|
||||
const (
|
||||
SQLTypeUnknown SQLStatementType = iota
|
||||
SQLTypeSelect
|
||||
SQLTypeInsert
|
||||
SQLTypeUpdate
|
||||
SQLTypeDelete
|
||||
SQLTypeDDL // CREATE, ALTER, DROP
|
||||
SQLTypeTruncate
|
||||
SQLTypeExplain
|
||||
SQLTypeShow
|
||||
SQLTypeSet
|
||||
)
|
||||
|
||||
// ClassifySQL analyzes a SQL statement and returns its type
|
||||
func ClassifySQL(sql string) SQLStatementType {
|
||||
// Normalize: trim and convert to uppercase for analysis
|
||||
normalized := strings.TrimSpace(strings.ToUpper(sql))
|
||||
|
||||
if normalized == "" {
|
||||
return SQLTypeUnknown
|
||||
}
|
||||
|
||||
// Remove comments
|
||||
normalized = regexp.MustCompile(`--.*`).ReplaceAllString(normalized, "")
|
||||
normalized = regexp.MustCompile(`/\*.*?\*/`).ReplaceAllString(normalized, "")
|
||||
normalized = strings.TrimSpace(normalized)
|
||||
|
||||
// Check statement type
|
||||
switch {
|
||||
case strings.HasPrefix(normalized, "SELECT"):
|
||||
return SQLTypeSelect
|
||||
case strings.HasPrefix(normalized, "INSERT"):
|
||||
return SQLTypeInsert
|
||||
case strings.HasPrefix(normalized, "UPDATE"):
|
||||
return SQLTypeUpdate
|
||||
case strings.HasPrefix(normalized, "DELETE"):
|
||||
return SQLTypeDelete
|
||||
case strings.HasPrefix(normalized, "TRUNCATE"):
|
||||
return SQLTypeTruncate
|
||||
case strings.HasPrefix(normalized, "CREATE"):
|
||||
return SQLTypeDDL
|
||||
case strings.HasPrefix(normalized, "ALTER"):
|
||||
return SQLTypeDDL
|
||||
case strings.HasPrefix(normalized, "DROP"):
|
||||
return SQLTypeDDL
|
||||
case strings.HasPrefix(normalized, "EXPLAIN"):
|
||||
return SQLTypeExplain
|
||||
case strings.HasPrefix(normalized, "SHOW"):
|
||||
return SQLTypeShow
|
||||
case strings.HasPrefix(normalized, "SET"):
|
||||
return SQLTypeSet
|
||||
default:
|
||||
return SQLTypeUnknown
|
||||
}
|
||||
}
|
||||
|
||||
// IsWriteOperation returns true if the SQL statement modifies data
|
||||
func IsWriteOperation(sqlType SQLStatementType) bool {
|
||||
switch sqlType {
|
||||
case SQLTypeInsert, SQLTypeUpdate, SQLTypeDelete, SQLTypeTruncate, SQLTypeDDL:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// IsReadOnlyMode returns whether the source is in read-only mode
|
||||
func (s *Source) IsReadOnlyMode() bool {
|
||||
return s.ReadOnlyMode && !s.EnableWriteMode
|
||||
}
|
||||
|
||||
// CanExecuteWrite checks if a write operation is allowed
|
||||
func (s *Source) CanExecuteWrite(sql string) error {
|
||||
sqlType := ClassifySQL(sql)
|
||||
|
||||
if IsWriteOperation(sqlType) && s.IsReadOnlyMode() {
|
||||
return &StructuredError{
|
||||
Code: ErrCodeReadOnlyViolation,
|
||||
Message: "Write operations are not allowed in read-only mode. Set enableWriteMode: true to allow writes.",
|
||||
Details: map[string]any{
|
||||
"sql_type": sqlType,
|
||||
"read_only_mode": s.ReadOnlyMode,
|
||||
"enable_write_mode": s.EnableWriteMode,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ApplyQueryLimits applies row limits to a SQL query for MCP security compliance.
|
||||
// Context timeout management is the responsibility of the caller (following Go best practices).
|
||||
// Returns potentially modified SQL with LIMIT clause for SELECT queries.
|
||||
func (s *Source) ApplyQueryLimits(sql string) (string, error) {
|
||||
sqlType := ClassifySQL(sql)
|
||||
|
||||
// Apply row limit only to SELECT queries
|
||||
if sqlType == SQLTypeSelect && s.MaxRowLimit > 0 {
|
||||
// Check if query already has LIMIT clause
|
||||
normalized := strings.ToUpper(sql)
|
||||
if !strings.Contains(normalized, " LIMIT ") {
|
||||
// Add LIMIT clause - trim trailing whitespace and semicolon
|
||||
sql = strings.TrimSpace(sql)
|
||||
sql = strings.TrimSuffix(sql, ";")
|
||||
sql = fmt.Sprintf("%s LIMIT %d", sql, s.MaxRowLimit)
|
||||
}
|
||||
}
|
||||
|
||||
return sql, nil
|
||||
}
|
||||
|
||||
// RedactSQL redacts sensitive values from SQL for telemetry
|
||||
func RedactSQL(sql string) string {
|
||||
// Redact string literals
|
||||
sql = regexp.MustCompile(`'[^']*'`).ReplaceAllString(sql, "'***'")
|
||||
|
||||
// Redact numbers that might be sensitive
|
||||
sql = regexp.MustCompile(`\b\d{10,}\b`).ReplaceAllString(sql, "***")
|
||||
|
||||
return sql
|
||||
}
|
||||
|
||||
// EmitTelemetry logs a telemetry event in structured JSON format
|
||||
func (s *Source) EmitTelemetry(ctx context.Context, event TelemetryEvent) {
|
||||
if !s.EnableTelemetry {
|
||||
return
|
||||
}
|
||||
|
||||
// Set cluster ID if not already set
|
||||
if event.ClusterID == "" {
|
||||
event.ClusterID = s.ClusterID
|
||||
if event.ClusterID == "" {
|
||||
event.ClusterID = s.Database // Fallback to database name
|
||||
}
|
||||
}
|
||||
|
||||
// Set database and user
|
||||
if event.Database == "" {
|
||||
event.Database = s.Database
|
||||
}
|
||||
if event.User == "" {
|
||||
event.User = s.User
|
||||
}
|
||||
|
||||
// Log as structured JSON
|
||||
if s.TelemetryVerbose {
|
||||
jsonBytes, _ := json.Marshal(event)
|
||||
slog.Info("CockroachDB MCP Telemetry", "event", string(jsonBytes))
|
||||
} else {
|
||||
// Minimal logging
|
||||
slog.Info("CockroachDB MCP",
|
||||
"tool", event.ToolName,
|
||||
"status", event.Status,
|
||||
"latency_ms", event.LatencyMs,
|
||||
"error_code", event.ErrorCode,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func initCockroachDBConnectionPoolWithRetry(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname string, queryParams map[string]string, maxRetries int, baseDelay time.Duration) (*pgxpool.Pool, error) {
|
||||
//nolint:all
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
|
||||
defer span.End()
|
||||
|
||||
userAgent, err := util.UserAgentFromContext(ctx)
|
||||
if err != nil {
|
||||
userAgent = "genai-toolbox"
|
||||
}
|
||||
if queryParams == nil {
|
||||
queryParams = make(map[string]string)
|
||||
}
|
||||
if _, ok := queryParams["application_name"]; !ok {
|
||||
queryParams["application_name"] = userAgent
|
||||
}
|
||||
|
||||
connURL := &url.URL{
|
||||
Scheme: "postgres",
|
||||
User: url.UserPassword(user, pass),
|
||||
Host: fmt.Sprintf("%s:%s", host, port),
|
||||
Path: dbname,
|
||||
RawQuery: ConvertParamMapToRawQuery(queryParams),
|
||||
}
|
||||
|
||||
var pool *pgxpool.Pool
|
||||
for attempt := 0; attempt <= maxRetries; attempt++ {
|
||||
pool, err = pgxpool.New(ctx, connURL.String())
|
||||
if err == nil {
|
||||
err = pool.Ping(ctx)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
return pool, nil
|
||||
}
|
||||
|
||||
if attempt < maxRetries {
|
||||
backoff := baseDelay * time.Duration(math.Pow(2, float64(attempt)))
|
||||
time.Sleep(backoff)
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to connect to CockroachDB after %d retries: %w", maxRetries, err)
|
||||
}
|
||||
|
||||
func ConvertParamMapToRawQuery(queryParams map[string]string) string {
|
||||
values := url.Values{}
|
||||
for k, v := range queryParams {
|
||||
values.Add(k, v)
|
||||
}
|
||||
return values.Encode()
|
||||
}
|
||||
224
internal/sources/cockroachdb/cockroachdb_test.go
Normal file
224
internal/sources/cockroachdb/cockroachdb_test.go
Normal file
@@ -0,0 +1,224 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cockroachdb
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
)
|
||||
|
||||
// TestCockroachDBSourceConfig verifies that several valid YAML source
// definitions decode successfully through newConfig and yield a Config
// whose SourceConfigType matches the package's SourceType constant.
// Field-level default values are covered by TestCockroachDBDefaultValues.
func TestCockroachDBSourceConfig(t *testing.T) {
	tests := []struct {
		name string
		yaml string
	}{
		{
			name: "valid config",
			yaml: `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
password: ""
database: defaultdb
maxRetries: 5
retryBaseDelay: 500ms
queryParams:
  sslmode: disable
`,
		},
		{
			name: "with optional queryParams",
			yaml: `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
password: testpass
database: testdb
queryParams:
  sslmode: require
  sslcert: /path/to/cert
`,
		},
		{
			name: "with custom retry settings",
			yaml: `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
password: ""
database: defaultdb
maxRetries: 10
retryBaseDelay: 1s
`,
		},
		{
			name: "without password (insecure mode)",
			yaml: `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
database: defaultdb
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			decoder := yaml.NewDecoder(strings.NewReader(tt.yaml))
			cfg, err := newConfig(context.Background(), "test", decoder)

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			if cfg == nil {
				t.Fatal("expected config but got nil")
			}

			// Verify it's the right type: newConfig returns an interface,
			// so assert down to the concrete cockroachdb Config.
			cockroachCfg, ok := cfg.(Config)
			if !ok {
				t.Fatalf("expected Config type, got %T", cfg)
			}

			// Verify SourceConfigType matches the package constant.
			if cockroachCfg.SourceConfigType() != SourceType {
				t.Errorf("expected SourceConfigType %q, got %q", SourceType, cockroachCfg.SourceConfigType())
			}

			t.Logf("✅ Config parsed successfully: %+v", cockroachCfg)
		})
	}
}
|
||||
|
||||
// TestCockroachDBSourceType confirms that a decoded config reports the
// literal source-type string "cockroachdb" (pinning the registered name
// rather than comparing against the SourceType constant).
func TestCockroachDBSourceType(t *testing.T) {
	yamlContent := `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
password: ""
database: defaultdb
`
	decoder := yaml.NewDecoder(strings.NewReader(yamlContent))
	cfg, err := newConfig(context.Background(), "test", decoder)
	if err != nil {
		t.Fatalf("failed to create config: %v", err)
	}

	if cfg.SourceConfigType() != "cockroachdb" {
		t.Errorf("expected SourceConfigType 'cockroachdb', got %q", cfg.SourceConfigType())
	}
}
|
||||
|
||||
// TestCockroachDBDefaultValues checks that newConfig fills in defaults
// when the YAML omits them: MaxRetries=5 and RetryBaseDelay="500ms",
// as exercised below.
func TestCockroachDBDefaultValues(t *testing.T) {
	yamlContent := `
name: test-cockroachdb
type: cockroachdb
host: localhost
port: "26257"
user: root
password: ""
database: defaultdb
`
	decoder := yaml.NewDecoder(strings.NewReader(yamlContent))
	cfg, err := newConfig(context.Background(), "test", decoder)
	if err != nil {
		t.Fatalf("failed to create config: %v", err)
	}

	// newConfig returns an interface; assert to the concrete Config.
	cockroachCfg, ok := cfg.(Config)
	if !ok {
		t.Fatalf("expected Config type")
	}

	// Check default values
	if cockroachCfg.MaxRetries != 5 {
		t.Errorf("expected default MaxRetries 5, got %d", cockroachCfg.MaxRetries)
	}

	if cockroachCfg.RetryBaseDelay != "500ms" {
		t.Errorf("expected default RetryBaseDelay '500ms', got %q", cockroachCfg.RetryBaseDelay)
	}

	t.Logf("✅ Default values set correctly")
}
|
||||
|
||||
func TestConvertParamMapToRawQuery(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
params map[string]string
|
||||
want []string // Expected substrings in any order
|
||||
}{
|
||||
{
|
||||
name: "empty params",
|
||||
params: map[string]string{},
|
||||
want: []string{},
|
||||
},
|
||||
{
|
||||
name: "single param",
|
||||
params: map[string]string{
|
||||
"sslmode": "disable",
|
||||
},
|
||||
want: []string{"sslmode=disable"},
|
||||
},
|
||||
{
|
||||
name: "multiple params",
|
||||
params: map[string]string{
|
||||
"sslmode": "require",
|
||||
"application_name": "test-app",
|
||||
},
|
||||
want: []string{"sslmode=require", "application_name=test-app"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := ConvertParamMapToRawQuery(tt.params)
|
||||
|
||||
if len(tt.want) == 0 {
|
||||
if result != "" {
|
||||
t.Errorf("expected empty string, got %q", result)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Check that all expected substrings are in the result
|
||||
for _, want := range tt.want {
|
||||
if !contains(result, want) {
|
||||
t.Errorf("expected result to contain %q, got %q", want, result)
|
||||
}
|
||||
}
|
||||
|
||||
t.Logf("✅ Query string: %s", result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// contains reports whether substr occurs within s. It is a thin local
// alias for strings.Contains kept for brevity in table-driven tests.
func contains(s, substr string) bool {
	return strings.Contains(s, substr)
}
|
||||
455
internal/sources/cockroachdb/security_test.go
Normal file
455
internal/sources/cockroachdb/security_test.go
Normal file
@@ -0,0 +1,455 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cockroachdb
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
)
|
||||
|
||||
// TestClassifySQL checks that ClassifySQL maps representative statements
// (including leading whitespace, a leading line comment, and lowercase
// keywords) to the expected SQLStatementType, and that an empty string
// classifies as unknown.
func TestClassifySQL(t *testing.T) {
	tests := []struct {
		name     string
		sql      string
		expected SQLStatementType
	}{
		{"SELECT", "SELECT * FROM users", SQLTypeSelect},
		{"SELECT with spaces", " SELECT * FROM users ", SQLTypeSelect},
		{"SELECT with comment", "-- comment\nSELECT * FROM users", SQLTypeSelect},
		{"INSERT", "INSERT INTO users (name) VALUES ('alice')", SQLTypeInsert},
		{"UPDATE", "UPDATE users SET name='bob' WHERE id=1", SQLTypeUpdate},
		{"DELETE", "DELETE FROM users WHERE id=1", SQLTypeDelete},
		{"CREATE TABLE", "CREATE TABLE users (id UUID PRIMARY KEY)", SQLTypeDDL},
		{"ALTER TABLE", "ALTER TABLE users ADD COLUMN email STRING", SQLTypeDDL},
		{"DROP TABLE", "DROP TABLE users", SQLTypeDDL},
		{"TRUNCATE", "TRUNCATE TABLE users", SQLTypeTruncate},
		{"EXPLAIN", "EXPLAIN SELECT * FROM users", SQLTypeExplain},
		{"SHOW", "SHOW TABLES", SQLTypeShow},
		{"SET", "SET application_name = 'myapp'", SQLTypeSet},
		{"Empty", "", SQLTypeUnknown},
		{"Lowercase select", "select * from users", SQLTypeSelect},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := ClassifySQL(tt.sql)
			if result != tt.expected {
				t.Errorf("ClassifySQL(%q) = %v, want %v", tt.sql, result, tt.expected)
			}
		})
	}
}
|
||||
|
||||
// TestIsWriteOperation tests write operation detection
|
||||
func TestIsWriteOperation(t *testing.T) {
|
||||
tests := []struct {
|
||||
sqlType SQLStatementType
|
||||
expected bool
|
||||
}{
|
||||
{SQLTypeSelect, false},
|
||||
{SQLTypeInsert, true},
|
||||
{SQLTypeUpdate, true},
|
||||
{SQLTypeDelete, true},
|
||||
{SQLTypeTruncate, true},
|
||||
{SQLTypeDDL, true},
|
||||
{SQLTypeExplain, false},
|
||||
{SQLTypeShow, false},
|
||||
{SQLTypeSet, false},
|
||||
{SQLTypeUnknown, false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.sqlType.String(), func(t *testing.T) {
|
||||
result := IsWriteOperation(tt.sqlType)
|
||||
if result != tt.expected {
|
||||
t.Errorf("IsWriteOperation(%v) = %v, want %v", tt.sqlType, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for SQLStatementType to string
|
||||
func (s SQLStatementType) String() string {
|
||||
switch s {
|
||||
case SQLTypeSelect:
|
||||
return "SELECT"
|
||||
case SQLTypeInsert:
|
||||
return "INSERT"
|
||||
case SQLTypeUpdate:
|
||||
return "UPDATE"
|
||||
case SQLTypeDelete:
|
||||
return "DELETE"
|
||||
case SQLTypeDDL:
|
||||
return "DDL"
|
||||
case SQLTypeTruncate:
|
||||
return "TRUNCATE"
|
||||
case SQLTypeExplain:
|
||||
return "EXPLAIN"
|
||||
case SQLTypeShow:
|
||||
return "SHOW"
|
||||
case SQLTypeSet:
|
||||
return "SET"
|
||||
default:
|
||||
return "UNKNOWN"
|
||||
}
|
||||
}
|
||||
|
||||
// TestCanExecuteWrite verifies that CanExecuteWrite rejects write
// statements (INSERT, CREATE TABLE) when the source is read-only,
// returning a *StructuredError carrying ErrCodeReadOnlyViolation, and
// permits them when write mode is enabled. SELECT is always allowed.
func TestCanExecuteWrite(t *testing.T) {
	tests := []struct {
		name            string
		readOnlyMode    bool
		enableWriteMode bool
		sql             string
		expectError     bool
		errorCode       string
	}{
		{
			name:            "SELECT in read-only mode",
			readOnlyMode:    true,
			enableWriteMode: false,
			sql:             "SELECT * FROM users",
			expectError:     false,
		},
		{
			name:            "INSERT in read-only mode",
			readOnlyMode:    true,
			enableWriteMode: false,
			sql:             "INSERT INTO users (name) VALUES ('alice')",
			expectError:     true,
			errorCode:       ErrCodeReadOnlyViolation,
		},
		{
			name:            "INSERT with write mode enabled",
			readOnlyMode:    false,
			enableWriteMode: true,
			sql:             "INSERT INTO users (name) VALUES ('alice')",
			expectError:     false,
		},
		{
			name:            "CREATE TABLE in read-only mode",
			readOnlyMode:    true,
			enableWriteMode: false,
			sql:             "CREATE TABLE test (id UUID PRIMARY KEY)",
			expectError:     true,
			errorCode:       ErrCodeReadOnlyViolation,
		},
		{
			name:            "CREATE TABLE with write mode enabled",
			readOnlyMode:    false,
			enableWriteMode: true,
			sql:             "CREATE TABLE test (id UUID PRIMARY KEY)",
			expectError:     false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			source := &Source{
				Config: Config{
					ReadOnlyMode:    tt.readOnlyMode,
					EnableWriteMode: tt.enableWriteMode,
				},
			}

			err := source.CanExecuteWrite(tt.sql)

			if tt.expectError {
				if err == nil {
					t.Errorf("Expected error but got nil")
					return
				}

				// The error must be a *StructuredError so callers can
				// inspect its machine-readable Code.
				structErr, ok := err.(*StructuredError)
				if !ok {
					t.Errorf("Expected StructuredError but got %T", err)
					return
				}

				if structErr.Code != tt.errorCode {
					t.Errorf("Expected error code %s but got %s", tt.errorCode, structErr.Code)
				}
			} else {
				if err != nil {
					t.Errorf("Expected no error but got: %v", err)
				}
			}
		})
	}
}
|
||||
|
||||
// TestApplyQueryLimits tests query limit application
|
||||
func TestApplyQueryLimits(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
sql string
|
||||
maxRowLimit int
|
||||
expectedSQL string
|
||||
shouldAddLimit bool
|
||||
}{
|
||||
{
|
||||
name: "SELECT without LIMIT",
|
||||
sql: "SELECT * FROM users",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "SELECT * FROM users LIMIT 100",
|
||||
shouldAddLimit: true,
|
||||
},
|
||||
{
|
||||
name: "SELECT with existing LIMIT",
|
||||
sql: "SELECT * FROM users LIMIT 50",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "SELECT * FROM users LIMIT 50",
|
||||
shouldAddLimit: false,
|
||||
},
|
||||
{
|
||||
name: "SELECT without LIMIT and semicolon",
|
||||
sql: "SELECT * FROM users;",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "SELECT * FROM users LIMIT 100",
|
||||
shouldAddLimit: true,
|
||||
},
|
||||
{
|
||||
name: "SELECT with trailing newline and semicolon",
|
||||
sql: "SELECT * FROM users;\n",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "SELECT * FROM users LIMIT 100",
|
||||
shouldAddLimit: true,
|
||||
},
|
||||
{
|
||||
name: "SELECT with multiline and semicolon",
|
||||
sql: "\n\tSELECT *\n\tFROM users\n\tORDER BY id;\n",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "SELECT *\n\tFROM users\n\tORDER BY id LIMIT 100",
|
||||
shouldAddLimit: true,
|
||||
},
|
||||
{
|
||||
name: "INSERT should not have LIMIT added",
|
||||
sql: "INSERT INTO users (name) VALUES ('alice')",
|
||||
maxRowLimit: 100,
|
||||
expectedSQL: "INSERT INTO users (name) VALUES ('alice')",
|
||||
shouldAddLimit: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
source := &Source{
|
||||
Config: Config{
|
||||
MaxRowLimit: tt.maxRowLimit,
|
||||
QueryTimeoutSec: 0, // Timeout now managed by caller
|
||||
},
|
||||
}
|
||||
|
||||
modifiedSQL, err := source.ApplyQueryLimits(tt.sql)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if modifiedSQL != tt.expectedSQL {
|
||||
t.Errorf("Expected SQL:\n%s\nGot:\n%s", tt.expectedSQL, modifiedSQL)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestApplyQueryTimeout documents that query timeouts are the caller's
// responsibility: the caller wraps its context with a deadline derived
// from Config.QueryTimeoutSec, while ApplyQueryLimits itself no longer
// touches the context and only rewrites the SQL (a no-op here because
// MaxRowLimit is 0).
func TestApplyQueryTimeout(t *testing.T) {
	source := &Source{
		Config: Config{
			QueryTimeoutSec: 5, // Documented recommended timeout
			MaxRowLimit:     0, // Don't add LIMIT
		},
	}

	// Caller creates timeout context (following Go best practices)
	ctx := context.Background()
	ctx, cancel := context.WithTimeout(ctx, time.Duration(source.QueryTimeoutSec)*time.Second)
	defer cancel()

	// Apply query limits (doesn't modify context anymore)
	modifiedSQL, err := source.ApplyQueryLimits("SELECT * FROM users")
	if err != nil {
		t.Errorf("Unexpected error: %v", err)
		return
	}

	// Verify context has deadline (managed by caller)
	deadline, ok := ctx.Deadline()
	if !ok {
		t.Error("Expected deadline to be set but it wasn't")
		return
	}

	// Verify deadline is approximately 5 seconds from now
	expectedDeadline := time.Now().Add(5 * time.Second)
	diff := deadline.Sub(expectedDeadline)
	if diff < 0 {
		diff = -diff
	}

	// Allow 1 second tolerance
	if diff > time.Second {
		t.Errorf("Deadline diff too large: %v", diff)
	}

	// Verify SQL is unchanged (LIMIT not added since MaxRowLimit=0)
	if modifiedSQL != "SELECT * FROM users" {
		t.Errorf("Expected SQL unchanged, got: %s", modifiedSQL)
	}
}
|
||||
|
||||
// TestRedactSQL verifies the telemetry redaction rules exercised below:
// quoted string literals become '***', long digit runs (potential IDs,
// SSNs, phone numbers) become ***, while short numbers are preserved.
func TestRedactSQL(t *testing.T) {
	tests := []struct {
		name     string
		sql      string
		expected string
	}{
		{
			name:     "String literal redaction",
			sql:      "SELECT * FROM users WHERE name='alice' AND email='alice@example.com'",
			expected: "SELECT * FROM users WHERE name='***' AND email='***'",
		},
		{
			name:     "Long number redaction",
			sql:      "SELECT * FROM users WHERE ssn=1234567890123",
			expected: "SELECT * FROM users WHERE ssn=***",
		},
		{
			name:     "Short numbers not redacted",
			sql:      "SELECT * FROM users WHERE age=25",
			expected: "SELECT * FROM users WHERE age=25",
		},
		{
			name:     "Multiple sensitive values",
			sql:      "INSERT INTO users (name, email, phone) VALUES ('bob', 'bob@example.com', '5551234567')",
			expected: "INSERT INTO users (name, email, phone) VALUES ('***', '***', '***')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := RedactSQL(tt.sql)
			if result != tt.expected {
				t.Errorf("RedactSQL:\nGot: %s\nExpected: %s", result, tt.expected)
			}
		})
	}
}
|
||||
|
||||
// TestIsReadOnlyMode tests read-only mode detection
|
||||
func TestIsReadOnlyMode(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
readOnlyMode bool
|
||||
enableWriteMode bool
|
||||
expected bool
|
||||
}{
|
||||
{"Read-only by default", true, false, true},
|
||||
{"Write mode enabled", false, true, false},
|
||||
{"Both false", false, false, false},
|
||||
{"Read-only overridden by write mode", true, true, false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
source := &Source{
|
||||
Config: Config{
|
||||
ReadOnlyMode: tt.readOnlyMode,
|
||||
EnableWriteMode: tt.enableWriteMode,
|
||||
},
|
||||
}
|
||||
|
||||
result := source.IsReadOnlyMode()
|
||||
if result != tt.expected {
|
||||
t.Errorf("IsReadOnlyMode() = %v, want %v", result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestStructuredError tests error formatting
|
||||
func TestStructuredError(t *testing.T) {
|
||||
err := &StructuredError{
|
||||
Code: ErrCodeReadOnlyViolation,
|
||||
Message: "Write operations not allowed",
|
||||
Details: map[string]any{
|
||||
"sql_type": "INSERT",
|
||||
},
|
||||
}
|
||||
|
||||
errorStr := err.Error()
|
||||
if !strings.Contains(errorStr, ErrCodeReadOnlyViolation) {
|
||||
t.Errorf("Error string should contain error code: %s", errorStr)
|
||||
}
|
||||
if !strings.Contains(errorStr, "Write operations not allowed") {
|
||||
t.Errorf("Error string should contain message: %s", errorStr)
|
||||
}
|
||||
}
|
||||
|
||||
// TestDefaultSecuritySettings tests that security defaults are correct
|
||||
func TestDefaultSecuritySettings(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create a minimal YAML config
|
||||
yamlData := `name: test
|
||||
type: cockroachdb
|
||||
host: localhost
|
||||
port: "26257"
|
||||
user: root
|
||||
database: defaultdb
|
||||
`
|
||||
|
||||
var cfg Config
|
||||
if err := yaml.Unmarshal([]byte(yamlData), &cfg); err != nil {
|
||||
t.Fatalf("Failed to unmarshal YAML: %v", err)
|
||||
}
|
||||
|
||||
// Apply defaults through newConfig logic manually
|
||||
cfg.MaxRetries = 5
|
||||
cfg.RetryBaseDelay = "500ms"
|
||||
cfg.ReadOnlyMode = true
|
||||
cfg.EnableWriteMode = false
|
||||
cfg.MaxRowLimit = 1000
|
||||
cfg.QueryTimeoutSec = 30
|
||||
cfg.EnableTelemetry = true
|
||||
cfg.TelemetryVerbose = false
|
||||
|
||||
_ = ctx // prevent unused
|
||||
|
||||
// Verify MCP security defaults
|
||||
if !cfg.ReadOnlyMode {
|
||||
t.Error("ReadOnlyMode should be true by default")
|
||||
}
|
||||
if cfg.EnableWriteMode {
|
||||
t.Error("EnableWriteMode should be false by default")
|
||||
}
|
||||
if cfg.MaxRowLimit != 1000 {
|
||||
t.Errorf("MaxRowLimit should be 1000, got %d", cfg.MaxRowLimit)
|
||||
}
|
||||
if cfg.QueryTimeoutSec != 30 {
|
||||
t.Errorf("QueryTimeoutSec should be 30, got %d", cfg.QueryTimeoutSec)
|
||||
}
|
||||
if !cfg.EnableTelemetry {
|
||||
t.Error("EnableTelemetry should be true by default")
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user