Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-30 01:38:38 -05:00)

Compare commits: processing ... config-yam (8 commits)

Commits: f6fef90359, 87c5953b75, 7f0c49a4df, ad8df40791, c29355ff82, 70f5550910, 348c9fde08, aef539bcf3
@@ -87,7 +87,7 @@ steps:
 - "CLOUD_SQL_POSTGRES_REGION=$_REGION"
 - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
 secretEnv:
-["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
+["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
 volumes:
 - name: "go"
 path: "/gopath"
@@ -134,7 +134,7 @@ steps:
 - "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
 - "ALLOYDB_POSTGRES_REGION=$_REGION"
 - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
-secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
+secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
 volumes:
 - name: "go"
 path: "/gopath"
@@ -171,23 +171,6 @@ steps:
 alloydbainl \
 alloydbainl

-- id: "alloydb-omni"
-name: golang:1
-waitFor: ["compile-test-binary"]
-entrypoint: /bin/bash
-env:
-- "GOPATH=/gopath"
-volumes:
-- name: "go"
-path: "/gopath"
-args:
-- -c
-- |
-.ci/test_with_coverage.sh \
-"AlloyDB Omni" \
-alloydbomni \
-postgres
-
 - id: "bigtable"
 name: golang:1
 waitFor: ["compile-test-binary"]
@@ -310,26 +293,7 @@ steps:
 .ci/test_with_coverage.sh \
 "Cloud Healthcare API" \
 cloudhealthcare \
-cloudhealthcare
+cloudhealthcare || echo "Integration tests failed."

-- id: "cloud-logging-admin"
-name: golang:1
-waitFor: ["compile-test-binary"]
-entrypoint: /bin/bash
-env:
-- "GOPATH=/gopath"
-- "LOGADMIN_PROJECT=$PROJECT_ID"
-secretEnv: ["CLIENT_ID"]
-volumes:
-- name: "go"
-path: "/gopath"
-args:
-- -c
-- |
-.ci/test_with_coverage.sh \
-"Cloud Logging Admin" \
-cloudloggingadmin \
-cloudloggingadmin
-
 - id: "postgres"
 name: golang:1
@@ -341,7 +305,7 @@ steps:
 - "POSTGRES_HOST=$_POSTGRES_HOST"
 - "POSTGRES_PORT=$_POSTGRES_PORT"
 - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
-secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
+secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
 volumes:
 - name: "go"
 path: "/gopath"
@@ -923,7 +887,7 @@ steps:
 tar -C /usr/local -xzf go.tar.gz
 export PATH="/usr/local/go/bin:$$PATH"

-go test -v ./tests/oracle/... \
+go test -v ./internal/sources/oracle/... \
 -coverprofile=oracle_coverage.out \
 -coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...

@@ -931,8 +895,8 @@ steps:
 total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
 echo "Oracle total coverage: $total_coverage"
 coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
-if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 60)}'; then
-echo "Coverage failure: $total_coverage is below 60%."
+if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
+echo "Coverage failure: $total_coverage is below 20%."
 exit 1
 fi

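The coverage gate above relies on `awk` exit status: `awk 'BEGIN {exit !(cov < N)}'` exits 0 exactly when the coverage number is below the threshold, which makes the `if` branch fire and fail the build. A minimal standalone sketch of the same check, using a made-up coverage value and the 20% limit from the new side of this diff:

```sh
# Sketch only: the threshold check used in the build step above, with a hypothetical value.
total_coverage="17.3%"                                   # e.g. parsed from `go tool cover -func`
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
  echo "Coverage failure: $total_coverage is below 20%."
  exit 1
fi
```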
@@ -1000,13 +964,6 @@ steps:

 availableSecrets:
 secretManager:
-# Common secrets
-- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
-env: CLIENT_ID
-- versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
-env: API_KEY
-
-# Resource-specific secrets
 - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
 env: CLOUD_SQL_POSTGRES_USER
 - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
@@ -1023,6 +980,8 @@ availableSecrets:
 env: POSTGRES_USER
 - versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
 env: POSTGRES_PASS
+- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
+env: CLIENT_ID
 - versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
 env: NEO4J_USER
 - versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
.github/workflows/deploy_dev_docs.yaml | 4 (vendored)
@@ -51,12 +51,12 @@ jobs:
 extended: true

 - name: Setup Node
-uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
+uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
 with:
 node-version: "22"

 - name: Cache dependencies
-uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
+uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
 with:
 path: ~/.npm
 key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -57,7 +57,7 @@ jobs:
 with:
 hugo-version: "0.145.0"
 extended: true
-- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
+- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
 with:
 node-version: "22"

.github/workflows/deploy_versioned_docs.yaml | 2 (vendored)
@@ -44,7 +44,7 @@ jobs:
 extended: true

 - name: Setup Node
-uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
+uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
 with:
 node-version: "22"

.github/workflows/docs_preview_deploy.yaml | 4 (vendored)
@@ -62,12 +62,12 @@ jobs:
 extended: true

 - name: Setup Node
-uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
+uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
 with:
 node-version: "22"

 - name: Cache dependencies
-uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
+uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
 with:
 path: ~/.npm
 key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
.github/workflows/link_checker_workflow.yaml | 3 (vendored)
@@ -25,7 +25,7 @@ jobs:
 uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6

 - name: Restore lychee cache
-uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
+uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
 with:
 path: .lycheecache
 key: cache-lychee-${{ github.sha }}
@@ -39,7 +39,6 @@ jobs:
 --no-progress
 --cache
 --max-cache-age 1d
---exclude '^neo4j\+.*' --exclude '^bolt://.*'
 README.md
 docs/
 output: /tmp/foo.txt
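Dropping the two `--exclude` flags on the new side means `neo4j+...` and `bolt://...` links are no longer skipped by the link checker. For reference, a roughly equivalent local run of the same check, assuming the lychee CLI is installed (the workflow itself passes these arguments to the lychee GitHub Action):

```sh
# Same arguments as the workflow step above, run by hand with a local lychee install.
# The two --exclude patterns are the ones present only on the old side of this diff.
lychee --no-progress --cache --max-cache-age 1d \
  --exclude '^neo4j\+.*' --exclude '^bolt://.*' \
  README.md docs/
```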
.github/workflows/lint.yaml | 4 (vendored)
@@ -51,11 +51,11 @@ jobs:
 console.log('Failed to remove label. Another job may have already removed it!');
 }
 - name: Setup Go
-uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
 with:
 go-version: "1.25"
 - name: Checkout code
-uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
 with:
 ref: ${{ github.event.pull_request.head.sha }}
 repository: ${{ github.event.pull_request.head.repo.full_name }}
.github/workflows/sync-labels.yaml | 2 (vendored)
@@ -29,7 +29,7 @@ jobs:
 issues: 'write'
 pull-requests: 'write'
 steps:
-- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
 - uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/tests.yaml | 4 (vendored)
@@ -57,12 +57,12 @@ jobs:
 }

 - name: Setup Go
-uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
 with:
 go-version: "1.24"

 - name: Checkout code
-uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
 with:
 ref: ${{ github.event.pull_request.head.sha }}
 repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -51,10 +51,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 # Add a new version block here before every release
 # The order of versions in this file is mirrored into the dropdown

-[[params.versions]]
-version = "v0.26.0"
-url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
-
 [[params.versions]]
 version = "v0.25.0"
 url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
@@ -39,7 +39,7 @@ https://dev.mysql.com/doc/refman/8.4/en/user-names.html
 # npmjs links can occasionally trigger rate limiting during high-frequency CI builds
 https://www.npmjs.com/package/@toolbox-sdk/core
 https://www.npmjs.com/package/@toolbox-sdk/adk
-https://www.oceanbase.com/
+

 # Ignore social media and blog profiles to reduce external request overhead
 https://medium.com/@mcp_toolbox
CHANGELOG.md | 25
@@ -1,30 +1,5 @@
 # Changelog

-## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
-
-
-### ⚠ BREAKING CHANGES
-
-* Validate tool naming ([#2305](https://github.com/googleapis/genai-toolbox/issues/2305)) ([5054212](https://github.com/googleapis/genai-toolbox/commit/5054212fa43017207fe83275d27b9fbab96e8ab5))
-* **tools/cloudgda:** Update description and parameter name for cloudgda tool ([#2288](https://github.com/googleapis/genai-toolbox/issues/2288)) ([6b02591](https://github.com/googleapis/genai-toolbox/commit/6b025917032394a66840488259db8ff2c3063016))
-
-### Features
-
-* Add new `user-agent-metadata` flag ([#2302](https://github.com/googleapis/genai-toolbox/issues/2302)) ([adc9589](https://github.com/googleapis/genai-toolbox/commit/adc9589766904d9e3cbe0a6399222f8d4bb9d0cc))
-* Add remaining flag to Toolbox server in MCP registry ([#2272](https://github.com/googleapis/genai-toolbox/issues/2272)) ([5e0999e](https://github.com/googleapis/genai-toolbox/commit/5e0999ebf5cdd9046e96857738254b2e0561b6d2))
-* **embeddingModel:** Add embedding model to MCP handler ([#2310](https://github.com/googleapis/genai-toolbox/issues/2310)) ([e4f60e5](https://github.com/googleapis/genai-toolbox/commit/e4f60e56335b755ef55b9553d3f40b31858ec8d9))
-* **sources/bigquery:** Make maximum rows returned from queries configurable ([#2262](https://github.com/googleapis/genai-toolbox/issues/2262)) ([4abf0c3](https://github.com/googleapis/genai-toolbox/commit/4abf0c39e717d53b22cc61efb65e09928c598236))
-* **prebuilt/cloud-sql:** Add create backup tool for Cloud SQL ([#2141](https://github.com/googleapis/genai-toolbox/issues/2141)) ([8e0fb03](https://github.com/googleapis/genai-toolbox/commit/8e0fb0348315a80f63cb47b3c7204869482448f4))
-* **prebuilt/cloud-sql:** Add restore backup tool for Cloud SQL ([#2171](https://github.com/googleapis/genai-toolbox/issues/2171)) ([00c3e6d](https://github.com/googleapis/genai-toolbox/commit/00c3e6d8cba54e2ab6cb271c7e6b378895df53e1))
-* Support combining multiple prebuilt configurations ([#2295](https://github.com/googleapis/genai-toolbox/issues/2295)) ([e535b37](https://github.com/googleapis/genai-toolbox/commit/e535b372ea81864d644a67135a1b07e4e519b4b4))
-* Support MCP specs version 2025-11-25 ([#2303](https://github.com/googleapis/genai-toolbox/issues/2303)) ([4d23a3b](https://github.com/googleapis/genai-toolbox/commit/4d23a3bbf2797b1f7fe328aeb5789e778121da23))
-* **tools:** Add `valueFromParam` support to Tool config ([#2333](https://github.com/googleapis/genai-toolbox/issues/2333)) ([15101b1](https://github.com/googleapis/genai-toolbox/commit/15101b1edbe2b85a4a5f9f819c23cf83138f4ee1))
-
-
-### Bug Fixes
-
-* **tools/cloudhealthcare:** Add check for client authorization before retrieving token string ([#2327](https://github.com/googleapis/genai-toolbox/issues/2327)) ([c25a233](https://github.com/googleapis/genai-toolbox/commit/c25a2330fea2ac382a398842c9e572e4e19bcb08))
-
 ## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)


@@ -83,7 +83,7 @@ Tool type serves as a category or type that a user can assign to a tool.

 The following guidelines apply to tool types:

-* Should use hyphens over underscores (e.g. `firestore-list-collections` or
+* Should user hyphens over underscores (e.g. `firestore-list-collections` or
 `firestore_list_colelctions`).
 * Should use product name in name (e.g. `firestore-list-collections` over
 `list-collections`).
README.md | 18
@@ -2,8 +2,6 @@

 # MCP Toolbox for Databases

-<a href="https://trendshift.io/repositories/13019" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13019" alt="googleapis%2Fgenai-toolbox | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
-
 [](https://googleapis.github.io/genai-toolbox/)
 [](https://discord.gg/Dmm69peqjh)
 [](https://medium.com/@mcp_toolbox)
@@ -107,7 +105,7 @@ redeploying your application.

 ## Getting Started

-### Quickstart: Running Toolbox using NPX
+### (Non-production) Running Toolbox

 You can run Toolbox directly with a [configuration file](#configuration):

@@ -142,7 +140,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.26.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 > chmod +x toolbox
 > ```
@@ -155,7 +153,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.26.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 > chmod +x toolbox
 > ```
@@ -168,7 +166,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.26.0
+> export VERSION=0.25.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 > chmod +x toolbox
 > ```
@@ -181,7 +179,7 @@ To install Toolbox as a binary:
 >
 > ```cmd
 > :: see releases page for other versions
-> set VERSION=0.26.0
+> set VERSION=0.25.0
 > curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
 > ```
 >
@@ -193,7 +191,7 @@ To install Toolbox as a binary:
 >
 > ```powershell
 > # see releases page for other versions
-> $VERSION = "0.26.0"
+> $VERSION = "0.25.0"
 > curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 > ```
 >
@@ -206,7 +204,7 @@ You can also install Toolbox as a container:

 ```sh
 # see releases page for other versions
-export VERSION=0.26.0
+export VERSION=0.25.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```

@@ -230,7 +228,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:

 ```sh
-go install github.com/googleapis/genai-toolbox@v0.26.0
+go install github.com/googleapis/genai-toolbox@v0.25.0
 ```
 <!-- {x-release-please-end} -->

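Both sides of the README diff pin a specific release (0.26.0 on the old side, 0.25.0 on the new side). A minimal sketch of the pinned-binary path followed by the "Running Toolbox" step the retitled section points at; the `--tools-file` flag appears in the `cmd/root.go` diff below, `tools.yaml` is a placeholder path, and the default listen address comes from the flag defaults in that same file:

```sh
# Sketch only: pin the release from the new side of the diff, download, and run with a config.
export VERSION=0.25.0
curl -L -o toolbox "https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox"
chmod +x toolbox
./toolbox --tools-file tools.yaml   # serves the tools defined in tools.yaml on 127.0.0.1:5000
```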
@@ -1,131 +0,0 @@
-// Copyright 2026 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cmd
-
-import (
-"context"
-"os"
-"path/filepath"
-"strings"
-"testing"
-)
-
-func TestInvokeTool(t *testing.T) {
-// Create a temporary tools file
-tmpDir := t.TempDir()
-
-toolsFileContent := `
-sources:
-my-sqlite:
-kind: sqlite
-database: test.db
-tools:
-hello-sqlite:
-kind: sqlite-sql
-source: my-sqlite
-description: "hello tool"
-statement: "SELECT 'hello' as greeting"
-echo-tool:
-kind: sqlite-sql
-source: my-sqlite
-description: "echo tool"
-statement: "SELECT ? as msg"
-parameters:
-- name: message
-type: string
-description: message to echo
-`
-
-toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
-if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
-t.Fatalf("failed to write tools file: %v", err)
-}
-
-tcs := []struct {
-desc string
-args []string
-want string
-wantErr bool
-errStr string
-}{
-{
-desc: "success - basic tool call",
-args: []string{"invoke", "hello-sqlite", "--tools-file", toolsFilePath},
-want: `"greeting": "hello"`,
-},
-{
-desc: "success - tool call with parameters",
-args: []string{"invoke", "echo-tool", `{"message": "world"}`, "--tools-file", toolsFilePath},
-want: `"msg": "world"`,
-},
-{
-desc: "error - tool not found",
-args: []string{"invoke", "non-existent", "--tools-file", toolsFilePath},
-wantErr: true,
-errStr: `tool "non-existent" not found`,
-},
-{
-desc: "error - invalid JSON params",
-args: []string{"invoke", "echo-tool", `invalid-json`, "--tools-file", toolsFilePath},
-wantErr: true,
-errStr: `params must be a valid JSON string`,
-},
-}
-
-for _, tc := range tcs {
-t.Run(tc.desc, func(t *testing.T) {
-_, got, err := invokeCommandWithContext(context.Background(), tc.args)
-if (err != nil) != tc.wantErr {
-t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
-}
-if tc.wantErr && !strings.Contains(err.Error(), tc.errStr) {
-t.Fatalf("got error %v, want error containing %q", err, tc.errStr)
-}
-if !tc.wantErr && !strings.Contains(got, tc.want) {
-t.Fatalf("got %q, want it to contain %q", got, tc.want)
-}
-})
-}
-}
-
-func TestInvokeTool_AuthUnsupported(t *testing.T) {
-tmpDir := t.TempDir()
-toolsFileContent := `
-sources:
-my-bq:
-kind: bigquery
-project: my-project
-useClientOAuth: true
-tools:
-bq-tool:
-kind: bigquery-sql
-source: my-bq
-description: "bq tool"
-statement: "SELECT 1"
-`
-toolsFilePath := filepath.Join(tmpDir, "auth_tools.yaml")
-if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
-t.Fatalf("failed to write tools file: %v", err)
-}
-
-args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath}
-_, _, err := invokeCommandWithContext(context.Background(), args)
-if err == nil {
-t.Fatal("expected error for tool requiring client auth, but got nil")
-}
-if !strings.Contains(err.Error(), "client authorization is not supported") {
-t.Fatalf("unexpected error message: %v", err)
-}
-}
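The deleted test above exercises an `invoke` subcommand, which is wired up via `internal/cli/invoke` in `cmd/root.go` below and is likewise dropped on the new side of this diff. Judging purely from the test arguments, a manual run would look roughly like the following; the binary name and the exact output shape are assumptions:

```sh
# Hypothetical manual runs mirroring the deleted test cases; tools.yaml stands in for the
# config file the test writes to a temp dir.
toolbox invoke hello-sqlite --tools-file tools.yaml                        # expect "greeting": "hello"
toolbox invoke echo-tool '{"message": "world"}' --tools-file tools.yaml    # expect "msg": "world"
```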
cmd/root.go | 520
@@ -34,7 +34,6 @@ import (
 "github.com/fsnotify/fsnotify"
 yaml "github.com/goccy/go-yaml"
 "github.com/googleapis/genai-toolbox/internal/auth"
-"github.com/googleapis/genai-toolbox/internal/cli/invoke"
 "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
 "github.com/googleapis/genai-toolbox/internal/log"
 "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
@@ -92,9 +91,6 @@ import (
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
-_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
-_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
-_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
@@ -103,7 +99,6 @@ import (
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
-_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
 _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
@@ -248,7 +243,6 @@ import (
 _ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
 _ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
 _ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
-_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
 _ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
 _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
 _ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
@@ -321,15 +315,15 @@ func Execute() {
 type Command struct {
 *cobra.Command

 cfg server.ServerConfig
 logger log.Logger
 tools_file string
 tools_files []string
 tools_folder string
-prebuiltConfigs []string
+prebuiltConfig string
 inStream io.Reader
 outStream io.Writer
 errStream io.Writer
 }

 // NewCommand returns a Command object representing an invocation of the CLI.
@@ -366,42 +360,36 @@ func NewCommand(opts ...Option) *Command {
 baseCmd.SetErr(cmd.errStream)

 flags := cmd.Flags()
-persistentFlags := cmd.PersistentFlags()

 flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
 flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")

 flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
 // deprecate tools_file
 _ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
-persistentFlags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
+flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
-persistentFlags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
+flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
-persistentFlags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
+flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
-persistentFlags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
+flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
-persistentFlags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
+flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
-persistentFlags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
+flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
-persistentFlags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
+flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
-persistentFlags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
+flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
 // Fetch prebuilt tools sources to customize the help description
 prebuiltHelp := fmt.Sprintf(
-"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
+"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
 strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
 )
-persistentFlags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
+flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
 flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
 flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
 flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
 // TODO: Insecure by default. Might consider updating this for v1.0.0
 flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
 flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
-persistentFlags.StringSliceVar(&cmd.cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")

 // wrap RunE command so that we have access to original Command object
 cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }

-// Register subcommands for tool invocation
-baseCmd.AddCommand(invoke.NewCommand(cmd))
-
 return cmd
 }
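The `--prebuilt` flag changes shape in the hunk above: the old side registers it with `StringSliceVar` on the persistent flag set (bound to `prebuiltConfigs`), the new side with `StringVar` on the local flag set (bound to `prebuiltConfig`), and the same persistent-to-local move applies to the `--tools-file`, `--tools-files`, `--tools-folder`, logging, and telemetry flags. A rough sketch of what that means on the command line; the source names are placeholders:

```sh
# Old side: --prebuilt is repeatable (or comma-separated) and persistent, so subcommands
# such as `invoke` also see it and the other persistent flags.
toolbox --prebuilt cloud-sql-postgres --prebuilt bigquery

# New side: --prebuilt accepts a single source and exists only on the root command.
toolbox --prebuilt cloud-sql-postgres
```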
@@ -437,124 +425,101 @@ func parseEnv(input string) (string, error) {
 return output, err
 }

-func convertToolsFile(raw []byte) ([]byte, error) {
+func convertToolsFile(ctx context.Context, raw []byte) ([]byte, error) {
 var input yaml.MapSlice
 decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
+if err := decoder.Decode(&input); err != nil {
+return nil, err
+}

+// Convert raw MapSlice to a helper map for quick lookup
+// while keeping the values as MapSlices to preserve internal order
+resourceOrder := []string{}
+lookup := make(map[string]yaml.MapSlice)
+for _, item := range input {
+key, ok := item.Key.(string)
+if !ok {
+return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
+}
+if slice, ok := item.Value.(yaml.MapSlice); ok {
+// convert authSources to authServices
+if key == "authSources" {
+key = "authServices"
+}
+// works even if lookup[key] is nil
+lookup[key] = append(lookup[key], slice...)
+// preserving the resource's order of original toolsFile
+if !slices.Contains(resourceOrder, key) {
+resourceOrder = append(resourceOrder, key)
+}
+} else {
+// toolsfile is already v2
+if key == "kind" {
+return raw, nil
+}
+return nil, fmt.Errorf("'%s' is not a map", key)
+}
+}
 // convert to tools file v2
 var buf bytes.Buffer
 encoder := yaml.NewEncoder(&buf)
+for _, kind := range resourceOrder {
-v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
+data, exists := lookup[kind]
-for {
+if !exists {
-if err := decoder.Decode(&input); err != nil {
+// if this is skipped for all keys, the tools file is in v2
-if err == io.EOF {
+continue
-break
-}
-return nil, err
 }
-for _, item := range input {
+// Transform each entry
-key, ok := item.Key.(string)
+for _, entry := range data {
+entryName, ok := entry.Key.(string)
 if !ok {
-return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
+return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
 }
-// check if the key is config file v1's key
+entryBody := ProcessValue(entry.Value, kind == "toolsets")
-if slices.Contains(v1keys, key) {
-// check if value conversion to yaml.MapSlice successfully
+transformed := yaml.MapSlice{
-// fields such as "tools" in toolsets might pass the first check but
+{Key: "kind", Value: kind},
-// fail to convert to MapSlice
+{Key: "name", Value: entryName},
-if slice, ok := item.Value.(yaml.MapSlice); ok {
+}
-// Deprecated: convert authSources to authServices
-if key == "authSources" {
+// Merge the transformed body into our result
-key = "authServices"
+if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
-}
+transformed = append(transformed, bodySlice...)
-transformed, err := transformDocs(key, slice)
-if err != nil {
-return nil, err
-}
-// encode per-doc
-for _, doc := range transformed {
-if err := encoder.Encode(doc); err != nil {
-return nil, err
-}
-}
-} else {
-// invalid input will be ignored
-// we don't want to throw error here since the config could
-// be valid but with a different order such as:
-// ---
-// tools:
-// - tool_a
-// kind: toolsets
-// ---
-continue
-}
 } else {
-// this doc is already v2, encode to buf
+return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
-if err := encoder.Encode(input); err != nil {
+}
-return nil, err
-}
+if err := encoder.Encode(transformed); err != nil {
-break
+return nil, err
 }
 }
 }
 return buf.Bytes(), nil
 }

-// transformDocs transforms the configuration file from v1 format to v2
-// yaml.MapSlice will preserve the order in a map
-func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
-var transformed []yaml.MapSlice
-for _, entry := range input {
-entryName, ok := entry.Key.(string)
-if !ok {
-return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
-}
-entryBody := ProcessValue(entry.Value, kind == "toolsets")
-
-currentTransformed := yaml.MapSlice{
-{Key: "kind", Value: kind},
-{Key: "name", Value: entryName},
-}
-
-// Merge the transformed body into our result
-if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
-currentTransformed = append(currentTransformed, bodySlice...)
-} else {
-return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
-}
-transformed = append(transformed, currentTransformed)
-}
-return transformed, nil
-}
-
 // ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
 func ProcessValue(v any, isToolset bool) any {
 switch val := v.(type) {
 case yaml.MapSlice:
-// creating a new MapSlice is safer for recursive transformation
+for i := range val {
-newVal := make(yaml.MapSlice, len(val))
-for i, item := range val {
 // Perform renaming
-if item.Key == "kind" {
+if val[i].Key == "kind" {
-item.Key = "type"
+val[i].Key = "type"
 }
 // Recursive call for nested values (e.g., nested objects or lists)
-item.Value = ProcessValue(item.Value, false)
+val[i].Value = ProcessValue(val[i].Value, false)
-newVal[i] = item
 }
-return newVal
+return val
 case []any:
 // Process lists: If it's a toolset top-level list, wrap it.
 if isToolset {
 return yaml.MapSlice{{Key: "tools", Value: val}}
 }
 // Otherwise, recurse into list items (to catch nested objects)
-newVal := make([]any, len(val))
 for i := range val {
-newVal[i] = ProcessValue(val[i], false)
+val[i] = ProcessValue(val[i], false)
 }
-return newVal
+return val
 default:
 return val
 }
@@ -570,7 +535,7 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
 }
 raw = []byte(output)

-raw, err = convertToolsFile(raw)
+raw, err = convertToolsFile(ctx, raw)
 if err != nil {
 return toolsFile, fmt.Errorf("error converting tools file: %s", err)
 }
@@ -925,183 +890,6 @@ func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder str
 return watchDirs, watchedFiles
 }

-func (cmd *Command) Config() server.ServerConfig {
-return cmd.cfg
-}
-
-func (cmd *Command) Out() io.Writer {
-return cmd.outStream
-}
-
-func (cmd *Command) Logger() log.Logger {
-return cmd.logger
-}
-
-func (cmd *Command) LoadConfig(ctx context.Context) error {
-logger, err := util.LoggerFromContext(ctx)
-if err != nil {
-return err
-}
-
-var allToolsFiles []ToolsFile
-
-// Load Prebuilt Configuration
-
-if len(cmd.prebuiltConfigs) > 0 {
-slices.Sort(cmd.prebuiltConfigs)
-sourcesList := strings.Join(cmd.prebuiltConfigs, ", ")
-logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
-logger.InfoContext(ctx, logMsg)
-
-for _, configName := range cmd.prebuiltConfigs {
-buf, err := prebuiltconfigs.Get(configName)
-if err != nil {
-logger.ErrorContext(ctx, err.Error())
-return err
-}
-
-// Parse into ToolsFile struct
-parsed, err := parseToolsFile(ctx, buf)
-if err != nil {
-errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
-logger.ErrorContext(ctx, errMsg.Error())
-return errMsg
-}
-allToolsFiles = append(allToolsFiles, parsed)
-}
-}
-
-// Determine if Custom Files should be loaded
-// Check for explicit custom flags
-isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
-
-// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
-useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured
-
-if useDefaultToolsFile {
-cmd.tools_file = "tools.yaml"
-isCustomConfigured = true
-}
-
-// Load Custom Configurations
-if isCustomConfigured {
-// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
-if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
-(cmd.tools_file != "" && cmd.tools_folder != "") ||
-(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
-errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
-logger.ErrorContext(ctx, errMsg.Error())
-return errMsg
-}
-
-var customTools ToolsFile
-var err error
-
-if len(cmd.tools_files) > 0 {
-// Use tools-files
-logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
-customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
-} else if cmd.tools_folder != "" {
-// Use tools-folder
-logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
-customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
-} else {
-// Use single file (tools-file or default `tools.yaml`)
-buf, readFileErr := os.ReadFile(cmd.tools_file)
-if readFileErr != nil {
-errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
-logger.ErrorContext(ctx, errMsg.Error())
-return errMsg
-}
-customTools, err = parseToolsFile(ctx, buf)
-if err != nil {
-err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
-}
-}
-
-if err != nil {
-logger.ErrorContext(ctx, err.Error())
-return err
-}
-allToolsFiles = append(allToolsFiles, customTools)
-}
-
-// Modify version string based on loaded configurations
-if len(cmd.prebuiltConfigs) > 0 {
-tag := "prebuilt"
-if isCustomConfigured {
-tag = "custom"
-}
-// cmd.prebuiltConfigs is already sorted above
-for _, configName := range cmd.prebuiltConfigs {
-cmd.cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
-}
-}
-
-// Merge Everything
-// This will error if custom tools collide with prebuilt tools
-finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
-if err != nil {
-logger.ErrorContext(ctx, err.Error())
-return err
-}
-
-cmd.cfg.SourceConfigs = finalToolsFile.Sources
-cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
-cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
-cmd.cfg.ToolConfigs = finalToolsFile.Tools
-cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
-cmd.cfg.PromptConfigs = finalToolsFile.Prompts
-
-return nil
-}
-
-func (cmd *Command) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) {
-// If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream
-loggerOut := cmd.outStream
-if cmd.cfg.Stdio {
-loggerOut = cmd.errStream
-}
-
-// Handle logger separately from config
-logger, err := log.NewLogger(cmd.cfg.LoggingFormat.String(), cmd.cfg.LogLevel.String(), loggerOut, cmd.errStream)
-if err != nil {
-return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err)
-}
-cmd.logger = logger
-
-ctx = util.WithLogger(ctx, cmd.logger)
-
-// Set up OpenTelemetry
-otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
-if err != nil {
-errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
-cmd.logger.ErrorContext(ctx, errMsg.Error())
-return ctx, nil, errMsg
-}
-
-shutdownFunc := func(ctx context.Context) error {
-err := otelShutdown(ctx)
-if err != nil {
-errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
-cmd.logger.ErrorContext(ctx, errMsg.Error())
-return err
-}
-return nil
-}
-
-instrumentation, err := telemetry.CreateTelemetryInstrumentation(cmd.cfg.Version)
-if err != nil {
-errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
-cmd.logger.ErrorContext(ctx, errMsg.Error())
-return ctx, shutdownFunc, errMsg
-}
-
-ctx = util.WithInstrumentation(ctx, instrumentation)
-
-return ctx, shutdownFunc, nil
-}
-
 func run(cmd *Command) error {
 ctx, cancel := context.WithCancel(cmd.Context())
 defer cancel()
@@ -1126,18 +914,149 @@ func run(cmd *Command) error {
 cancel()
 }(ctx)

-ctx, shutdown, err := cmd.Setup(ctx)
+// If stdio, set logger's out stream (usually DEBUG and INFO logs) to errStream
+loggerOut := cmd.outStream
+if cmd.cfg.Stdio {
+loggerOut = cmd.errStream
+}
+
+// Handle logger separately from config
+switch strings.ToLower(cmd.cfg.LoggingFormat.String()) {
+case "json":
+logger, err := log.NewStructuredLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
+if err != nil {
+return fmt.Errorf("unable to initialize logger: %w", err)
+}
+cmd.logger = logger
+case "standard":
+logger, err := log.NewStdLogger(loggerOut, cmd.errStream, cmd.cfg.LogLevel.String())
+if err != nil {
+return fmt.Errorf("unable to initialize logger: %w", err)
+}
+cmd.logger = logger
+default:
+return fmt.Errorf("logging format invalid")
+}
+
+ctx = util.WithLogger(ctx, cmd.logger)
+
+// Set up OpenTelemetry
+otelShutdown, err := telemetry.SetupOTel(ctx, cmd.cfg.Version, cmd.cfg.TelemetryOTLP, cmd.cfg.TelemetryGCP, cmd.cfg.TelemetryServiceName)
 if err != nil {
-return err
+errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
+cmd.logger.ErrorContext(ctx, errMsg.Error())
+return errMsg
 }
 defer func() {
-_ = shutdown(ctx)
+err := otelShutdown(ctx)
+if err != nil {
+errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
+cmd.logger.ErrorContext(ctx, errMsg.Error())
+}
 }()

-if err := cmd.LoadConfig(ctx); err != nil {
+var allToolsFiles []ToolsFile
+
+// Load Prebuilt Configuration
+if cmd.prebuiltConfig != "" {
+buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
+if err != nil {
+cmd.logger.ErrorContext(ctx, err.Error())
+return err
+}
+logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
+cmd.logger.InfoContext(ctx, logMsg)
+// Append prebuilt.source to Version string for the User Agent
+cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
+
+parsed, err := parseToolsFile(ctx, buf)
+if err != nil {
+errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
+cmd.logger.ErrorContext(ctx, errMsg.Error())
+return errMsg
+}
+allToolsFiles = append(allToolsFiles, parsed)
+}
+
+// Determine if Custom Files should be loaded
+// Check for explicit custom flags
+isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
+
+// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
+useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
+
+if useDefaultToolsFile {
+cmd.tools_file = "tools.yaml"
+isCustomConfigured = true
+}
+
+// Load Custom Configurations
+if isCustomConfigured {
+// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
+if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
+(cmd.tools_file != "" && cmd.tools_folder != "") ||
+(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
+errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
+cmd.logger.ErrorContext(ctx, errMsg.Error())
+return errMsg
+}
+
+var customTools ToolsFile
var err error
|
||||||
|
|
||||||
|
if len(cmd.tools_files) > 0 {
|
||||||
|
// Use tools-files
|
||||||
|
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
|
||||||
|
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
|
||||||
|
} else if cmd.tools_folder != "" {
|
||||||
|
// Use tools-folder
|
||||||
|
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
|
||||||
|
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
|
||||||
|
} else {
|
||||||
|
// Use single file (tools-file or default `tools.yaml`)
|
||||||
|
buf, readFileErr := os.ReadFile(cmd.tools_file)
|
||||||
|
if readFileErr != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
customTools, err = parseToolsFile(ctx, buf)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
allToolsFiles = append(allToolsFiles, customTools)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge Everything
|
||||||
|
// This will error if custom tools collide with prebuilt tools
|
||||||
|
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
||||||
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cmd.cfg.SourceConfigs = finalToolsFile.Sources
|
||||||
|
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
|
||||||
|
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
|
||||||
|
cmd.cfg.ToolConfigs = finalToolsFile.Tools
|
||||||
|
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
|
||||||
|
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
|
||||||
|
|
||||||
|
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||||
|
|
||||||
// start server
|
// start server
|
||||||
s, err := server.NewServer(ctx, cmd.cfg)
|
s, err := server.NewServer(ctx, cmd.cfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -1177,9 +1096,6 @@ func run(cmd *Command) error {
|
|||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine if Custom Files are configured (re-check as loadAndMergeConfig might have updated defaults)
|
|
||||||
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
|
|
||||||
|
|
||||||
if isCustomConfigured && !cmd.cfg.DisableReload {
|
if isCustomConfigured && !cmd.cfg.DisableReload {
|
||||||
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
|
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
|
||||||
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
||||||
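The dynamic-reload path above hands `resolveWatcherInputs` the three mutually exclusive flag values and gets back the directories to watch plus the specific files used to filter reload events. The real implementation is not included in this diff; the sketch below is only an assumption about its shape (file watchers typically watch directories rather than individual files), and the return types and helper name are illustrative, not the actual signature.

```go
package main

import (
	"fmt"
	"path/filepath"
)

// resolveWatcherInputsSketch is a hypothetical reconstruction: each configured
// file is reduced to its parent directory for the watcher, while the file
// names are kept so that unrelated changes in the same directory can be ignored.
func resolveWatcherInputsSketch(toolsFile string, toolsFiles []string, toolsFolder string) (watchDirs, watchedFiles map[string]bool) {
	watchDirs = map[string]bool{}
	watchedFiles = map[string]bool{}

	switch {
	case toolsFolder != "":
		// A folder is watched directly; every YAML file inside it is relevant.
		watchDirs[toolsFolder] = true
	case len(toolsFiles) > 0:
		for _, f := range toolsFiles {
			watchDirs[filepath.Dir(f)] = true
			watchedFiles[f] = true
		}
	default:
		watchDirs[filepath.Dir(toolsFile)] = true
		watchedFiles[toolsFile] = true
	}
	return watchDirs, watchedFiles
}

func main() {
	dirs, files := resolveWatcherInputsSketch("", []string{"a/tools1.yaml", "a/tools2.yaml", "b/tools3.yaml"}, "")
	fmt.Println(dirs, files) // map[a:true b:true] map[a/tools1.yaml:true a/tools2.yaml:true b/tools3.yaml:true]
}
```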
539	cmd/root_test.go
@@ -23,12 +23,14 @@ import (
 	"os"
 	"path"
 	"path/filepath"
+	"reflect"
 	"regexp"
 	"runtime"
 	"strings"
 	"testing"
 	"time"
 
+	yaml "github.com/goccy/go-yaml"
 	"github.com/google/go-cmp/cmp"
 
 	"github.com/googleapis/genai-toolbox/internal/auth/google"

@@ -70,9 +72,6 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
 	if c.AllowedHosts == nil {
 		c.AllowedHosts = []string{"*"}
 	}
-	if c.UserAgentMetadata == nil {
-		c.UserAgentMetadata = []string{}
-	}
 	return c
 }
 
@@ -233,13 +232,6 @@ func TestServerConfigFlags(t *testing.T) {
 				AllowedHosts: []string{"http://foo.com", "http://bar.com"},
 			}),
 		},
-		{
-			desc: "user agent metadata",
-			args: []string{"--user-agent-metadata", "foo,bar"},
-			want: withDefaults(server.ServerConfig{
-				UserAgentMetadata: []string{"foo", "bar"},
-			}),
-		},
 	}
 	for _, tc := range tcs {
 		t.Run(tc.desc, func(t *testing.T) {
@@ -430,27 +422,17 @@ func TestPrebuiltFlag(t *testing.T) {
 	tcs := []struct {
 		desc string
 		args []string
-		want []string
+		want string
 	}{
 		{
 			desc: "default value",
 			args: []string{},
-			want: []string{},
+			want: "",
 		},
 		{
-			desc: "single prebuilt flag",
-			args: []string{"--prebuilt", "alloydb"},
-			want: []string{"alloydb"},
-		},
-		{
-			desc: "multiple prebuilt flags",
-			args: []string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"},
-			want: []string{"alloydb", "bigquery"},
-		},
-		{
-			desc: "comma separated prebuilt flags",
-			args: []string{"--prebuilt", "alloydb,bigquery"},
-			want: []string{"alloydb", "bigquery"},
+			desc: "custom pre built flag",
+			args: []string{"--tools-file", "alloydb"},
+			want: "alloydb",
 		},
 	}
 	for _, tc := range tcs {

@@ -459,8 +441,8 @@ func TestPrebuiltFlag(t *testing.T) {
 			if err != nil {
 				t.Fatalf("unexpected error invoking command: %s", err)
 			}
-			if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" {
-				t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff)
+			if c.tools_file != tc.want {
+				t.Fatalf("got %v, want %v", c.cfg, tc.want)
 			}
 		})
 	}
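The test cases dropped on the `-` side above exercised `--prebuilt` as a repeatable, comma-separable list flag: both `--prebuilt alloydb --prebuilt bigquery` and `--prebuilt alloydb,bigquery` are expected to parse to `["alloydb", "bigquery"]`. That behavior matches what spf13/pflag's `StringSliceVar` provides out of the box; whether root.go registers the flag exactly this way is not shown in this diff, so treat the snippet below as a sketch of the expected flag semantics rather than the actual wiring.

```go
package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	var prebuilt []string
	fs := pflag.NewFlagSet("toolbox", pflag.ContinueOnError)
	// A string-slice flag accepts repeated occurrences and comma-separated values.
	fs.StringSliceVar(&prebuilt, "prebuilt", nil, "prebuilt tool configurations to load")

	if err := fs.Parse([]string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"}); err != nil {
		panic(err)
	}
	fmt.Println(prebuilt) // [alloydb bigquery]
}
```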
@@ -515,6 +497,18 @@ func TestDefaultLogLevel(t *testing.T) {
 }
 
 func TestConvertToolsFile(t *testing.T) {
+	ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
+	defer cancelCtx()
+	pr, pw := io.Pipe()
+	defer pw.Close()
+	defer pr.Close()
+
+	logger, err := log.NewStdLogger(pw, pw, "DEBUG")
+	if err != nil {
+		t.Fatalf("failed to setup logger %s", err)
+	}
+	ctx = util.WithLogger(ctx, logger)
+
 	tcs := []struct {
 		desc string
 		in   string
@@ -543,7 +537,8 @@ func TestConvertToolsFile(t *testing.T) {
|
|||||||
kind: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
statement: |
|
||||||
|
SELECT * FROM SQL_STATEMENT;
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -565,7 +560,8 @@ func TestConvertToolsFile(t *testing.T) {
|
|||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: some-key
|
apiKey: some-key
|
||||||
dimension: 768`,
|
dimension: 768`,
|
||||||
want: `kind: sources
|
want: `
|
||||||
|
kind: sources
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -585,7 +581,8 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
statement: |
|
||||||
|
SELECT * FROM SQL_STATEMENT;
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -610,18 +607,18 @@ name: gemini-model
|
|||||||
type: gemini
|
type: gemini
|
||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: some-key
|
apiKey: some-key
|
||||||
dimension: 768
|
dimension: 768`,
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "preserve resource order",
|
desc: "preserve resource order with grouping",
|
||||||
in: `
|
in: `
|
||||||
tools:
|
tools:
|
||||||
example_tool:
|
example_tool:
|
||||||
kind: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
statement: |
|
||||||
|
SELECT * FROM SQL_STATEMENT;
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -643,136 +640,23 @@ dimension: 768
|
|||||||
example_toolset:
|
example_toolset:
|
||||||
- example_tool
|
- example_tool
|
||||||
authSources:
|
authSources:
|
||||||
my-google-auth2:
|
|
||||||
kind: google
|
|
||||||
clientId: testing-id`,
|
|
||||||
want: `kind: tools
|
|
||||||
name: example_tool
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-instance
|
|
||||||
description: some description
|
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
|
||||||
- name: country
|
|
||||||
type: string
|
|
||||||
description: some description
|
|
||||||
---
|
|
||||||
kind: sources
|
|
||||||
name: my-pg-instance
|
|
||||||
type: cloud-sql-postgres
|
|
||||||
project: my-project
|
|
||||||
region: my-region
|
|
||||||
instance: my-instance
|
|
||||||
database: my_db
|
|
||||||
user: my_user
|
|
||||||
password: my_pass
|
|
||||||
---
|
|
||||||
kind: authServices
|
|
||||||
name: my-google-auth
|
|
||||||
type: google
|
|
||||||
clientId: testing-id
|
|
||||||
---
|
|
||||||
kind: toolsets
|
|
||||||
name: example_toolset
|
|
||||||
tools:
|
|
||||||
- example_tool
|
|
||||||
---
|
|
||||||
kind: authServices
|
|
||||||
name: my-google-auth2
|
|
||||||
type: google
|
|
||||||
clientId: testing-id
|
|
||||||
`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "convert combination of v1 and v2",
|
|
||||||
in: `
|
|
||||||
sources:
|
|
||||||
my-pg-instance:
|
|
||||||
kind: cloud-sql-postgres
|
|
||||||
project: my-project
|
|
||||||
region: my-region
|
|
||||||
instance: my-instance
|
|
||||||
database: my_db
|
|
||||||
user: my_user
|
|
||||||
password: my_pass
|
|
||||||
authServices:
|
|
||||||
my-google-auth:
|
my-google-auth:
|
||||||
kind: google
|
kind: google
|
||||||
clientId: testing-id
|
clientId: testing-id`,
|
||||||
tools:
|
want: `
|
||||||
example_tool:
|
kind: tools
|
||||||
kind: postgres-sql
|
name: example_tool
|
||||||
source: my-pg-instance
|
type: postgres-sql
|
||||||
description: some description
|
source: my-pg-instance
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
description: some description
|
||||||
parameters:
|
statement: |
|
||||||
- name: country
|
SELECT * FROM SQL_STATEMENT;
|
||||||
type: string
|
parameters:
|
||||||
description: some description
|
- name: country
|
||||||
toolsets:
|
type: string
|
||||||
example_toolset:
|
description: some description
|
||||||
- example_tool
|
|
||||||
prompts:
|
|
||||||
code_review:
|
|
||||||
description: ask llm to analyze code quality
|
|
||||||
messages:
|
|
||||||
- content: "please review the following code for quality: {{.code}}"
|
|
||||||
arguments:
|
|
||||||
- name: code
|
|
||||||
description: the code to review
|
|
||||||
embeddingModels:
|
|
||||||
gemini-model:
|
|
||||||
kind: gemini
|
|
||||||
model: gemini-embedding-001
|
|
||||||
apiKey: some-key
|
|
||||||
dimension: 768
|
|
||||||
---
|
---
|
||||||
kind: sources
|
kind: sources
|
||||||
name: my-pg-instance2
|
|
||||||
type: cloud-sql-postgres
|
|
||||||
project: my-project
|
|
||||||
region: my-region
|
|
||||||
instance: my-instance
|
|
||||||
---
|
|
||||||
kind: authServices
|
|
||||||
name: my-google-auth2
|
|
||||||
type: google
|
|
||||||
clientId: testing-id
|
|
||||||
---
|
|
||||||
kind: tools
|
|
||||||
name: example_tool2
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-instance
|
|
||||||
description: some description
|
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
|
||||||
- name: country
|
|
||||||
type: string
|
|
||||||
description: some description
|
|
||||||
---
|
|
||||||
kind: toolsets
|
|
||||||
name: example_toolset2
|
|
||||||
tools:
|
|
||||||
- example_tool
|
|
||||||
---
|
|
||||||
tools:
|
|
||||||
- example_tool
|
|
||||||
kind: toolsets
|
|
||||||
name: example_toolset3
|
|
||||||
---
|
|
||||||
kind: prompts
|
|
||||||
name: code_review2
|
|
||||||
description: ask llm to analyze code quality
|
|
||||||
messages:
|
|
||||||
- content: "please review the following code for quality: {{.code}}"
|
|
||||||
arguments:
|
|
||||||
- name: code
|
|
||||||
description: the code to review
|
|
||||||
---
|
|
||||||
kind: embeddingModels
|
|
||||||
name: gemini-model2
|
|
||||||
type: gemini`,
|
|
||||||
want: `kind: sources
|
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -787,88 +671,20 @@ name: my-google-auth
|
|||||||
type: google
|
type: google
|
||||||
clientId: testing-id
|
clientId: testing-id
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: authServices
|
||||||
name: example_tool
|
name: my-google-auth
|
||||||
type: postgres-sql
|
type: google
|
||||||
source: my-pg-instance
|
clientId: testing-id
|
||||||
description: some description
|
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
|
||||||
- name: country
|
|
||||||
type: string
|
|
||||||
description: some description
|
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool
|
- example_tool`,
|
||||||
---
|
|
||||||
kind: prompts
|
|
||||||
name: code_review
|
|
||||||
description: ask llm to analyze code quality
|
|
||||||
messages:
|
|
||||||
- content: "please review the following code for quality: {{.code}}"
|
|
||||||
arguments:
|
|
||||||
- name: code
|
|
||||||
description: the code to review
|
|
||||||
---
|
|
||||||
kind: embeddingModels
|
|
||||||
name: gemini-model
|
|
||||||
type: gemini
|
|
||||||
model: gemini-embedding-001
|
|
||||||
apiKey: some-key
|
|
||||||
dimension: 768
|
|
||||||
---
|
|
||||||
kind: sources
|
|
||||||
name: my-pg-instance2
|
|
||||||
type: cloud-sql-postgres
|
|
||||||
project: my-project
|
|
||||||
region: my-region
|
|
||||||
instance: my-instance
|
|
||||||
---
|
|
||||||
kind: authServices
|
|
||||||
name: my-google-auth2
|
|
||||||
type: google
|
|
||||||
clientId: testing-id
|
|
||||||
---
|
|
||||||
kind: tools
|
|
||||||
name: example_tool2
|
|
||||||
type: postgres-sql
|
|
||||||
source: my-pg-instance
|
|
||||||
description: some description
|
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
|
||||||
- name: country
|
|
||||||
type: string
|
|
||||||
description: some description
|
|
||||||
---
|
|
||||||
kind: toolsets
|
|
||||||
name: example_toolset2
|
|
||||||
tools:
|
|
||||||
- example_tool
|
|
||||||
---
|
|
||||||
tools:
|
|
||||||
- example_tool
|
|
||||||
kind: toolsets
|
|
||||||
name: example_toolset3
|
|
||||||
---
|
|
||||||
kind: prompts
|
|
||||||
name: code_review2
|
|
||||||
description: ask llm to analyze code quality
|
|
||||||
messages:
|
|
||||||
- content: "please review the following code for quality: {{.code}}"
|
|
||||||
arguments:
|
|
||||||
- name: code
|
|
||||||
description: the code to review
|
|
||||||
---
|
|
||||||
kind: embeddingModels
|
|
||||||
name: gemini-model2
|
|
||||||
type: gemini
|
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "no convertion needed",
|
desc: "no convertion needed",
|
||||||
in: `kind: sources
|
in: `
|
||||||
|
kind: sources
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -883,7 +699,8 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
statement: |
|
||||||
|
SELECT * FROM SQL_STATEMENT;
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -893,7 +710,8 @@ kind: toolsets
|
|||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool`,
|
- example_tool`,
|
||||||
want: `kind: sources
|
want: `
|
||||||
|
kind: sources
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -908,7 +726,8 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: SELECT * FROM SQL_STATEMENT;
|
statement: |
|
||||||
|
SELECT * FROM SQL_STATEMENT;
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -917,34 +736,69 @@ parameters:
|
|||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool
|
- example_tool`,
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid source",
|
desc: "invalid source",
|
||||||
in: `sources: invalid`,
|
in: `sources: invalid`,
|
||||||
want: "",
|
isErr: true,
|
||||||
|
errStr: "'sources' is not a map",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid toolset",
|
desc: "invalid toolset",
|
||||||
in: `toolsets: invalid`,
|
in: `toolsets: invalid`,
|
||||||
want: "",
|
isErr: true,
|
||||||
|
errStr: "'toolsets' is not a map",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tc := range tcs {
|
for _, tc := range tcs {
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
output, err := convertToolsFile([]byte(tc.in))
|
output, err := convertToolsFile(ctx, []byte(tc.in))
|
||||||
|
if tc.isErr {
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("missing error: %s", tc.errStr)
|
||||||
|
}
|
||||||
|
if err.Error() != tc.errStr {
|
||||||
|
t.Fatalf("invalid error string: got %s, want %s", err, tc.errStr)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("unexpected error: %s", err)
|
t.Fatalf("unexpected error: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if diff := cmp.Diff(string(output), tc.want); diff != "" {
|
var docs1, docs2 []yaml.MapSlice
|
||||||
t.Fatalf("incorrect toolsets parse: diff %v", diff)
|
if docs1, err = decodeToMapSlice(string(output)); err != nil {
|
||||||
|
t.Fatalf("error decoding output: %s", err)
|
||||||
|
}
|
||||||
|
if docs2, err = decodeToMapSlice(tc.want); err != nil {
|
||||||
|
t.Fatalf("Error decoding want: %s", err)
|
||||||
|
}
|
||||||
|
if !reflect.DeepEqual(docs1, docs2) {
|
||||||
|
t.Fatalf("incorrect output: got %s, want %s", string(output), tc.want)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func decodeToMapSlice(data string) ([]yaml.MapSlice, error) {
|
||||||
|
// ensures that the order is correct
|
||||||
|
var docs []yaml.MapSlice
|
||||||
|
decoder := yaml.NewDecoder(strings.NewReader(data))
|
||||||
|
for {
|
||||||
|
var doc yaml.MapSlice
|
||||||
|
err := decoder.Decode(&doc)
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
docs = append(docs, doc)
|
||||||
|
}
|
||||||
|
return docs, nil
|
||||||
|
}
|
||||||
|
|
||||||
func TestParseToolFile(t *testing.T) {
|
func TestParseToolFile(t *testing.T) {
|
||||||
ctx, err := testutils.ContextWithNewLogger()
|
ctx, err := testutils.ContextWithNewLogger()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -1015,8 +869,7 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
ToolNames: []string{"example_tool"},
|
ToolNames: []string{"example_tool"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
AuthServices: nil,
|
Prompts: nil,
|
||||||
Prompts: nil,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -1120,7 +973,7 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
Prompts: server.PromptConfigs{
|
Prompts: server.PromptConfigs{
|
||||||
"code_review": &custom.Config{
|
"code_review": custom.Config{
|
||||||
Name: "code_review",
|
Name: "code_review",
|
||||||
Description: "ask llm to analyze code quality",
|
Description: "ask llm to analyze code quality",
|
||||||
Arguments: prompts.Arguments{
|
Arguments: prompts.Arguments{
|
||||||
@@ -1138,12 +991,12 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
in: `
|
in: `
|
||||||
kind: prompts
|
kind: prompts
|
||||||
name: my-prompt
|
name: my-prompt
|
||||||
description: A prompt template for data analysis.
|
description: A prompt template for data analysis.
|
||||||
arguments:
|
arguments:
|
||||||
- name: country
|
- name: country
|
||||||
description: The country to analyze.
|
description: The country to analyze.
|
||||||
messages:
|
messages:
|
||||||
- content: Analyze the data for {{.country}}.
|
- content: Analyze the data for {{.country}}.
|
||||||
`,
|
`,
|
||||||
wantToolsFile: ToolsFile{
|
wantToolsFile: ToolsFile{
|
||||||
Sources: nil,
|
Sources: nil,
|
||||||
@@ -1213,17 +1066,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
database: my_db
|
database: my_db
|
||||||
user: my_user
|
user: my_user
|
||||||
password: my_pass
|
password: my_pass
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: my-client-id
|
clientId: my-client-id
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: other-client-id
|
clientId: other-client-id
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
@@ -1249,7 +1102,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
field: email
|
field: email
|
||||||
- name: other-google-service
|
- name: other-google-service
|
||||||
field: other_email
|
field: other_email
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
@@ -1417,17 +1270,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
database: my_db
|
database: my_db
|
||||||
user: my_user
|
user: my_user
|
||||||
password: my_pass
|
password: my_pass
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: my-client-id
|
clientId: my-client-id
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: other-client-id
|
clientId: other-client-id
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
@@ -1455,7 +1308,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
field: email
|
field: email
|
||||||
- name: other-google-service
|
- name: other-google-service
|
||||||
field: other_email
|
field: other_email
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
@@ -1714,17 +1567,17 @@ func TestEnvVarReplacement(t *testing.T) {
|
|||||||
Authorization: ${TestHeader}
|
Authorization: ${TestHeader}
|
||||||
queryParams:
|
queryParams:
|
||||||
api-key: ${API_KEY}
|
api-key: ${API_KEY}
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: ${clientId}
|
clientId: ${clientId}
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: ${clientId2}
|
clientId: ${clientId2}
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: http
|
type: http
|
||||||
@@ -1765,12 +1618,12 @@ func TestEnvVarReplacement(t *testing.T) {
|
|||||||
- name: Language
|
- name: Language
|
||||||
type: string
|
type: string
|
||||||
description: language string
|
description: language string
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: ${toolset_name}
|
name: ${toolset_name}
|
||||||
tools:
|
tools:
|
||||||
- example_tool
|
- example_tool
|
||||||
---
|
---
|
||||||
kind: prompts
|
kind: prompts
|
||||||
name: ${prompt_name}
|
name: ${prompt_name}
|
||||||
description: A test prompt for {{.name}}.
|
description: A test prompt for {{.name}}.
|
||||||
@@ -2054,7 +1907,6 @@ func TestSingleEdit(t *testing.T) {
|
|||||||
|
|
||||||
func TestPrebuiltTools(t *testing.T) {
|
func TestPrebuiltTools(t *testing.T) {
|
||||||
// Get prebuilt configs
|
// Get prebuilt configs
|
||||||
alloydb_omni_config, _ := prebuiltconfigs.Get("alloydb-omni")
|
|
||||||
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
|
alloydb_admin_config, _ := prebuiltconfigs.Get("alloydb-postgres-admin")
|
||||||
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
||||||
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
||||||
@@ -2105,12 +1957,6 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user")
|
t.Setenv("ALLOYDB_POSTGRES_USER", "your_alloydb_user")
|
||||||
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password")
|
t.Setenv("ALLOYDB_POSTGRES_PASSWORD", "your_alloydb_password")
|
||||||
|
|
||||||
t.Setenv("ALLOYDB_OMNI_HOST", "localhost")
|
|
||||||
t.Setenv("ALLOYDB_OMNI_PORT", "5432")
|
|
||||||
t.Setenv("ALLOYDB_OMNI_DATABASE", "your_alloydb_db")
|
|
||||||
t.Setenv("ALLOYDB_OMNI_USER", "your_alloydb_user")
|
|
||||||
t.Setenv("ALLOYDB_OMNI_PASSWORD", "your_alloydb_password")
|
|
||||||
|
|
||||||
t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol")
|
t.Setenv("CLICKHOUSE_PROTOCOL", "your_clickhouse_protocol")
|
||||||
t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database")
|
t.Setenv("CLICKHOUSE_DATABASE", "your_clickhouse_database")
|
||||||
t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password")
|
t.Setenv("CLICKHOUSE_PASSWORD", "your_clickhouse_password")
|
||||||
@@ -2204,16 +2050,6 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
in []byte
|
in []byte
|
||||||
wantToolset server.ToolsetConfigs
|
wantToolset server.ToolsetConfigs
|
||||||
}{
|
}{
|
||||||
{
|
|
||||||
name: "alloydb omni prebuilt tools",
|
|
||||||
in: alloydb_omni_config,
|
|
||||||
wantToolset: server.ToolsetConfigs{
|
|
||||||
"alloydb_omni_database_tools": tools.ToolsetConfig{
|
|
||||||
Name: "alloydb_omni_database_tools",
|
|
||||||
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_columnar_configurations", "list_columnar_recommended_columns", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
name: "alloydb postgres admin prebuilt tools",
|
name: "alloydb postgres admin prebuilt tools",
|
||||||
in: alloydb_admin_config,
|
in: alloydb_admin_config,
|
||||||
@@ -2230,7 +2066,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_postgres_admin_tools",
|
Name: "cloud_sql_postgres_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2240,7 +2076,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_mysql_admin_tools",
|
Name: "cloud_sql_mysql_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2250,7 +2086,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_mssql_admin_tools",
|
Name: "cloud_sql_mssql_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2765,7 +2601,6 @@ description: "Dummy"
|
|||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: sqlite_database_tools
|
name: sqlite_database_tools
|
||||||
tools:
|
|
||||||
- dummy_tool
|
- dummy_tool
|
||||||
`
|
`
|
||||||
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
|
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
|
||||||
@@ -2806,12 +2641,6 @@ authSources:
|
|||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
|
||||||
desc: "sqlite called twice error",
|
|
||||||
args: []string{"--prebuilt", "sqlite", "--prebuilt", "sqlite"},
|
|
||||||
wantErr: true,
|
|
||||||
errString: "resource conflicts detected",
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
desc: "tool conflict error",
|
desc: "tool conflict error",
|
||||||
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
|
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
|
||||||
@@ -2920,115 +2749,3 @@ func TestDefaultToolsFileBehavior(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParameterReferenceValidation(t *testing.T) {
|
|
||||||
ctx, err := testutils.ContextWithNewLogger()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("unexpected error: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Base template
|
|
||||||
baseYaml := `
|
|
||||||
sources:
|
|
||||||
dummy-source:
|
|
||||||
kind: http
|
|
||||||
baseUrl: http://example.com
|
|
||||||
tools:
|
|
||||||
test-tool:
|
|
||||||
kind: postgres-sql
|
|
||||||
source: dummy-source
|
|
||||||
description: test tool
|
|
||||||
statement: SELECT 1;
|
|
||||||
parameters:
|
|
||||||
%s`
|
|
||||||
|
|
||||||
tcs := []struct {
|
|
||||||
desc string
|
|
||||||
params string
|
|
||||||
wantErr bool
|
|
||||||
errSubstr string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "valid backward reference",
|
|
||||||
params: `
|
|
||||||
- name: source_param
|
|
||||||
type: string
|
|
||||||
description: source
|
|
||||||
- name: copy_param
|
|
||||||
type: string
|
|
||||||
description: copy
|
|
||||||
valueFromParam: source_param`,
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "valid forward reference (out of order)",
|
|
||||||
params: `
|
|
||||||
- name: copy_param
|
|
||||||
type: string
|
|
||||||
description: copy
|
|
||||||
valueFromParam: source_param
|
|
||||||
- name: source_param
|
|
||||||
type: string
|
|
||||||
description: source`,
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "invalid missing reference",
|
|
||||||
params: `
|
|
||||||
- name: copy_param
|
|
||||||
type: string
|
|
||||||
description: copy
|
|
||||||
valueFromParam: non_existent_param`,
|
|
||||||
wantErr: true,
|
|
||||||
errSubstr: "references '\"non_existent_param\"' in the 'valueFromParam' field",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "invalid self reference",
|
|
||||||
params: `
|
|
||||||
- name: myself
|
|
||||||
type: string
|
|
||||||
description: self
|
|
||||||
valueFromParam: myself`,
|
|
||||||
wantErr: true,
|
|
||||||
errSubstr: "parameter \"myself\" cannot copy value from itself",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "multiple valid references",
|
|
||||||
params: `
|
|
||||||
- name: a
|
|
||||||
type: string
|
|
||||||
description: a
|
|
||||||
- name: b
|
|
||||||
type: string
|
|
||||||
description: b
|
|
||||||
valueFromParam: a
|
|
||||||
- name: c
|
|
||||||
type: string
|
|
||||||
description: c
|
|
||||||
valueFromParam: a`,
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tc := range tcs {
|
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
|
||||||
// Indent parameters to match YAML structure
|
|
||||||
yamlContent := fmt.Sprintf(baseYaml, tc.params)
|
|
||||||
|
|
||||||
_, err := parseToolsFile(ctx, []byte(yamlContent))
|
|
||||||
|
|
||||||
if tc.wantErr {
|
|
||||||
if err == nil {
|
|
||||||
t.Fatal("expected error, got nil")
|
|
||||||
}
|
|
||||||
if !strings.Contains(err.Error(), tc.errSubstr) {
|
|
||||||
t.Errorf("error %q does not contain expected substring %q", err.Error(), tc.errSubstr)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("unexpected error: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
0	cmd/test.db	Normal file

@@ -1 +1 @@
-0.26.0
+0.25.0
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -27,13 +27,6 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```.
|
|
||||||
|
|
||||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -24,13 +24,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
|
|||||||
|
|
||||||
In the Antigravity MCP Store, click the "Install" button.
|
In the Antigravity MCP Store, click the "Install" button.
|
||||||
|
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```.
|
|
||||||
|
|
||||||
You'll now be able to see all enabled tools in the "Tools" tab.
|
You'll now be able to see all enabled tools in the "Tools" tab.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
|
|||||||
@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -20,13 +20,6 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```.
|
|
||||||
|
|
||||||
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt looker```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -21,13 +21,6 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the "Install" button.
|
1. In the Antigravity MCP Store, click the "Install" button.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```.
|
|
||||||
|
|
||||||
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
|
||||||
|
|
||||||
|
|||||||
@@ -12,17 +12,10 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
|
|||||||
## Install & Configuration
|
## Install & Configuration
|
||||||
|
|
||||||
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
|
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
|
||||||
> [!NOTE]
|
|
||||||
> On first use, the installation process automatically downloads and uses
|
|
||||||
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
|
|
||||||
> `>=0.26.0`. To update MCP Toolbox, use:
|
|
||||||
> ```npm i -g @toolbox-sdk/server@latest```
|
|
||||||
> To always run the latest version, update the MCP server configuration to use:
|
|
||||||
> ```npx -y @toolbox-sdk/server@latest```.
|
|
||||||
|
|
||||||
3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
|
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
|
||||||
|
|
||||||
4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
|
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
|
||||||
|
|
||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
|
||||||
|
|||||||
@@ -234,7 +234,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.26.0\" # x-release-please-version\n",
|
"version = \"0.25.0\" # x-release-please-version\n",
|
||||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
@@ -520,7 +520,8 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"! pip install google-adk[toolbox] --quiet"
|
"! pip install toolbox-core --quiet\n",
|
||||||
|
"! pip install google-adk --quiet"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -535,18 +536,14 @@
|
|||||||
"from google.adk.runners import Runner\n",
|
"from google.adk.runners import Runner\n",
|
||||||
"from google.adk.sessions import InMemorySessionService\n",
|
"from google.adk.sessions import InMemorySessionService\n",
|
||||||
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
||||||
"from google.adk.tools.toolbox_toolset import ToolboxToolset\n",
|
|
||||||
"from google.genai import types\n",
|
"from google.genai import types\n",
|
||||||
|
"from toolbox_core import ToolboxSyncClient\n",
|
||||||
"\n",
|
"\n",
|
||||||
"import os\n",
|
"import os\n",
|
||||||
"# TODO(developer): replace this with your Google API key\n",
|
"# TODO(developer): replace this with your Google API key\n",
|
||||||
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Configure toolset\n",
|
"toolbox_client = ToolboxSyncClient(\"http://127.0.0.1:5000\")\n",
|
||||||
"toolset = ToolboxToolset(\n",
|
|
||||||
" server_url=\"http://127.0.0.1:5000\",\n",
|
|
||||||
" toolset_name=\"my-toolset\"\n",
|
|
||||||
")\n",
|
|
||||||
"\n",
|
"\n",
|
||||||
"prompt = \"\"\"\n",
|
"prompt = \"\"\"\n",
|
||||||
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
||||||
@@ -563,7 +560,7 @@
|
|||||||
" name='hotel_agent',\n",
|
" name='hotel_agent',\n",
|
||||||
" description='A helpful AI assistant.',\n",
|
" description='A helpful AI assistant.',\n",
|
||||||
" instruction=prompt,\n",
|
" instruction=prompt,\n",
|
||||||
" tools=[toolset],\n",
|
" tools=toolbox_client.load_toolset(\"my-toolset\"),\n",
|
||||||
")\n",
|
")\n",
|
||||||
"\n",
|
"\n",
|
||||||
"session_service = InMemorySessionService()\n",
|
"session_service = InMemorySessionService()\n",
|
||||||
|
|||||||
@@ -16,12 +16,6 @@ Databases” as its initial development predated MCP, but was renamed to align
|
|||||||
with recently added MCP compatibility.
|
with recently added MCP compatibility.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
{{< notice note >}}
|
|
||||||
This document has been updated to support the configuration file v2 format. To
|
|
||||||
view documentation with configuration file v1 format, please navigate to the
|
|
||||||
top-right menu and select versions v0.26.0 or older.
|
|
||||||
{{< /notice >}}
|
|
||||||
|
|
||||||
## Why Toolbox?
|
## Why Toolbox?
|
||||||
|
|
||||||
Toolbox helps you build Gen AI tools that let your agents access data in your
|
Toolbox helps you build Gen AI tools that let your agents access data in your
|
||||||
@@ -77,7 +71,7 @@ redeploying your application.
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### Quickstart: Running Toolbox using NPX
|
### (Non-production) Running Toolbox
|
||||||
|
|
||||||
You can run Toolbox directly with a [configuration file](../configure.md):
|
You can run Toolbox directly with a [configuration file](../configure.md):
|
||||||
|
|
||||||
@@ -109,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -120,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -131,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -142,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
|
|||||||
|
|
||||||
```cmd
|
```cmd
|
||||||
:: see releases page for other versions
|
:: see releases page for other versions
|
||||||
set VERSION=0.26.0
|
set VERSION=0.25.0
|
||||||
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -152,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):
|
|||||||
|
|
||||||
```powershell
|
```powershell
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
$VERSION = "0.26.0"
|
$VERSION = "0.25.0"
|
||||||
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -164,7 +158,7 @@ You can also install Toolbox as a container:
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.26.0
|
export VERSION=0.25.0
|
||||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -183,7 +177,7 @@ To install from source, ensure you have the latest version of
|
|||||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install github.com/googleapis/genai-toolbox@v0.26.0
|
go install github.com/googleapis/genai-toolbox@v0.25.0
|
||||||
```
|
```
|
||||||
|
|
||||||
{{% /tab %}}
|
{{% /tab %}}
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ runtime](https://research.google.com/colaboratory/local-runtimes.html).
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install google-adk[toolbox]
|
pip install toolbox-core
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
@@ -73,7 +73,7 @@ pip install toolbox-core
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
# No other dependencies required for ADK
|
pip install google-adk
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
|
|||||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -24,13 +24,12 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@dabh/diagnostics": {
|
"node_modules/@dabh/diagnostics": {
|
||||||
"version": "2.0.8",
|
"version": "2.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz",
|
||||||
"integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==",
|
"integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@so-ric/colorspace": "^1.1.6",
|
"colorspace": "1.1.x",
|
||||||
"enabled": "2.0.x",
|
"enabled": "2.0.x",
|
||||||
"kuler": "^2.0.0"
|
"kuler": "^2.0.0"
|
||||||
}
|
}
|
||||||
@@ -579,10 +578,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@google-cloud/firestore": {
|
"node_modules/@google-cloud/firestore": {
|
||||||
"version": "7.11.6",
|
"version": "7.11.3",
|
||||||
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.3.tgz",
|
||||||
"integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==",
|
"integrity": "sha512-qsM3/WHpawF07SRVvEJJVRwhYzM7o9qtuksyuqnrMig6fxIrwWnsezECWsG/D5TyYru51Fv5c/RTqNDQ2yU+4w==",
|
||||||
"license": "Apache-2.0",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -2889,17 +2887,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
||||||
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
||||||
},
|
},
|
||||||
"node_modules/@so-ric/colorspace": {
|
|
||||||
"version": "1.1.6",
|
|
||||||
"resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz",
|
|
||||||
"integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
|
||||||
"dependencies": {
|
|
||||||
"color": "^5.0.2",
|
|
||||||
"text-hex": "1.0.x"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@toolbox-sdk/core": {
|
"node_modules/@toolbox-sdk/core": {
|
||||||
"version": "0.1.2",
|
"version": "0.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
||||||
@@ -3528,53 +3515,38 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color": {
|
"node_modules/color": {
|
||||||
"version": "5.0.3",
|
"version": "3.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
|
||||||
"integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==",
|
"integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-convert": "^3.1.3",
|
"color-convert": "^1.9.3",
|
||||||
"color-string": "^2.1.3"
|
"color-string": "^1.6.0"
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-convert": {
|
"node_modules/color-convert": {
|
||||||
"version": "3.1.3",
|
"version": "1.9.3",
|
||||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||||
"integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==",
|
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "^2.0.0"
|
"color-name": "1.1.3"
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=14.6"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-name": {
|
"node_modules/color-name": {
|
||||||
"version": "2.1.0",
|
"version": "1.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||||
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
|
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
|
||||||
"license": "MIT",
|
"optional": true
|
||||||
"optional": true,
|
|
||||||
"engines": {
|
|
||||||
"node": ">=12.20"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"node_modules/color-string": {
|
"node_modules/color-string": {
|
||||||
"version": "2.1.4",
|
"version": "1.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz",
|
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
|
||||||
"integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==",
|
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "^2.0.0"
|
"color-name": "^1.0.0",
|
||||||
},
|
"simple-swizzle": "^0.2.2"
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/colorette": {
|
"node_modules/colorette": {
|
||||||
@@ -3582,6 +3554,16 @@
|
|||||||
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
||||||
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
||||||
},
|
},
|
||||||
|
"node_modules/colorspace": {
|
||||||
|
"version": "1.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz",
|
||||||
|
"integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"color": "^3.1.3",
|
||||||
|
"text-hex": "1.0.x"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/combined-stream": {
|
"node_modules/combined-stream": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
@@ -4986,6 +4968,12 @@
|
|||||||
"node": ">= 0.10"
|
"node": ">= 0.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/is-arrayish": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
"node_modules/is-core-module": {
|
"node_modules/is-core-module": {
|
||||||
"version": "2.16.1",
|
"version": "2.16.1",
|
||||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
||||||
@@ -5126,14 +5114,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jsonwebtoken/node_modules/jws": {
|
"node_modules/jsonwebtoken/node_modules/jws": {
|
||||||
"version": "3.2.3",
|
"version": "3.2.2",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz",
|
||||||
"integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==",
|
"integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^1.4.2",
|
"jwa": "^1.4.1",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5166,12 +5153,11 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jws": {
|
"node_modules/jws": {
|
||||||
"version": "4.0.1",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||||
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^2.0.1",
|
"jwa": "^2.0.0",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5438,10 +5424,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/node-forge": {
|
"node_modules/node-forge": {
|
||||||
"version": "1.3.3",
|
"version": "1.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz",
|
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
|
||||||
"integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==",
|
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
|
||||||
"license": "(BSD-3-Clause OR GPL-2.0)",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
@@ -6053,6 +6038,15 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/simple-swizzle": {
|
||||||
|
"version": "0.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
|
||||||
|
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"is-arrayish": "^0.3.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/source-map": {
|
"node_modules/source-map": {
|
||||||
"version": "0.6.1",
|
"version": "0.6.1",
|
||||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||||
@@ -6239,7 +6233,6 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
||||||
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true
|
"optional": true
|
||||||
},
|
},
|
||||||
"node_modules/thriftrw": {
|
"node_modules/thriftrw": {
|
||||||
@@ -6423,14 +6416,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/winston": {
|
"node_modules/winston": {
|
||||||
"version": "3.19.0",
|
"version": "3.17.0",
|
||||||
"resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz",
|
"resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz",
|
||||||
"integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==",
|
"integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==",
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@colors/colors": "^1.6.0",
|
"@colors/colors": "^1.6.0",
|
||||||
"@dabh/diagnostics": "^2.0.8",
|
"@dabh/diagnostics": "^2.0.2",
|
||||||
"async": "^3.2.3",
|
"async": "^3.2.3",
|
||||||
"is-stream": "^2.0.0",
|
"is-stream": "^2.0.0",
|
||||||
"logform": "^2.7.0",
|
"logform": "^2.7.0",
|
||||||
|
|||||||
@@ -975,10 +975,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/lodash": {
|
"node_modules/lodash": {
|
||||||
"version": "4.17.23",
|
"version": "4.17.21",
|
||||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
|
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||||
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
|
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
||||||
"license": "MIT"
|
|
||||||
},
|
},
|
||||||
"node_modules/magic-bytes.js": {
|
"node_modules/magic-bytes.js": {
|
||||||
"version": "1.12.1",
|
"version": "1.12.1",
|
||||||
|
|||||||
@@ -1,17 +1,15 @@
|
|||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
from toolbox_core import ToolboxSyncClient
|
||||||
|
|
||||||
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
||||||
toolset = ToolboxToolset(
|
client = ToolboxSyncClient("http://127.0.0.1:5000")
|
||||||
server_url="http://127.0.0.1:5000",
|
|
||||||
)
|
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='root_agent',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=[toolset],
|
tools=client.load_toolset(),
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
google-adk[toolbox]==1.23.0
|
google-adk==1.21.0
|
||||||
|
toolbox-core==0.5.4
|
||||||
pytest==9.0.2
|
pytest==9.0.2
|
||||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
* `restore_backup`
|
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -302,7 +301,6 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
|
|||||||
|
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -45,19 +45,19 @@ instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ The native SDKs can be combined with MCP clients in many cases.
|
|||||||
|
|
||||||
Toolbox currently supports the following versions of MCP specification:
|
Toolbox currently supports the following versions of MCP specification:
|
||||||
|
|
||||||
* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
|
|
||||||
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
||||||
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
||||||
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
||||||
|
|||||||
@@ -46,10 +46,10 @@ with the necessary configuration for deployment to Vertex AI Agent Engine.
|
|||||||
process will generate deployment configuration files (like a `Makefile` and
|
process will generate deployment configuration files (like a `Makefile` and
|
||||||
`Dockerfile`) in your project directory.
|
`Dockerfile`) in your project directory.
|
||||||
|
|
||||||
4. Add `google-adk[toolbox]` as a dependency to the new project:
|
4. Add `toolbox-core` as a dependency to the new project:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
uv add google-adk[toolbox]
|
uv add toolbox-core
|
||||||
```
|
```
|
||||||
|
|
||||||
## Step 3: Configure Google Cloud Authentication
|
## Step 3: Configure Google Cloud Authentication
|
||||||
@@ -95,23 +95,22 @@ authentication token.
|
|||||||
```python
|
```python
|
||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
from toolbox_core import ToolboxSyncClient, auth_methods
|
||||||
from toolbox_adk import CredentialStrategy
|
|
||||||
|
|
||||||
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
||||||
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
||||||
|
|
||||||
# Initialize the toolset with Workload Identity (generates ID token for the URL)
|
# Initialize the client with the Cloud Run URL and Auth headers
|
||||||
toolset = ToolboxToolset(
|
client = ToolboxSyncClient(
|
||||||
server_url=TOOLBOX_URL,
|
TOOLBOX_URL,
|
||||||
credentials=CredentialStrategy.workload_identity(target_audience=TOOLBOX_URL)
|
client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
|
||||||
)
|
)
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='root_agent',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=[toolset],
|
tools=client.load_toolset(),
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
|
|||||||
@@ -207,7 +207,6 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
|
|||||||
{{< tab header="Python" lang="python" >}}
|
{{< tab header="Python" lang="python" >}}
|
||||||
import asyncio
|
import asyncio
|
||||||
from toolbox_core import ToolboxClient, auth_methods
|
from toolbox_core import ToolboxClient, auth_methods
|
||||||
from toolbox_core.protocol import Protocol
|
|
||||||
|
|
||||||
# Replace with the Cloud Run service URL generated in the previous step
|
# Replace with the Cloud Run service URL generated in the previous step
|
||||||
URL = "https://cloud-run-url.app"
|
URL = "https://cloud-run-url.app"
|
||||||
@@ -218,7 +217,6 @@ async def main():
|
|||||||
async with ToolboxClient(
|
async with ToolboxClient(
|
||||||
URL,
|
URL,
|
||||||
client_headers={"Authorization": auth_token_provider},
|
client_headers={"Authorization": auth_token_provider},
|
||||||
protocol=Protocol.TOOLBOX,
|
|
||||||
) as toolbox:
|
) as toolbox:
|
||||||
toolset = await toolbox.load_toolset()
|
toolset = await toolbox.load_toolset()
|
||||||
# ...
|
# ...
|
||||||
@@ -283,5 +281,3 @@ contain the specific error message needed to diagnose the problem.
|
|||||||
Manager, it means the Toolbox service account is missing permissions.
|
Manager, it means the Toolbox service account is missing permissions.
|
||||||
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
||||||
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
||||||
|
|
||||||
- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.
|
|
||||||
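For the Secret Manager permission issue described above, one way to grant the missing role is sketched below. It assumes the default `toolbox-identity` service account name from the deployment steps; substitute your own project ID.

```bash
# Grant the Toolbox service account read access to Secret Manager secrets
gcloud projects add-iam-policy-binding PROJECT_ID \
  --member="serviceAccount:toolbox-identity@PROJECT_ID.iam.gserviceaccount.com" \
  --role="roles/secretmanager.secretAccessor"
```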
@@ -1,74 +0,0 @@
|
|||||||
---
|
|
||||||
title: "Invoke Tools via CLI"
|
|
||||||
type: docs
|
|
||||||
weight: 10
|
|
||||||
description: >
|
|
||||||
Learn how to invoke your tools directly from the command line using the `invoke` command.
|
|
||||||
---
|
|
||||||
|
|
||||||
The `invoke` command allows you to invoke tools defined in your configuration directly from the CLI. This is useful for:
|
|
||||||
|
|
||||||
- **Ephemeral Invocation:** Executing a tool without spinning up a full MCP server/client.
|
|
||||||
- **Debugging:** Isolating tool execution logic and testing with various parameter combinations.
|
|
||||||
|
|
||||||
{{< notice tip >}}
|
|
||||||
**Keep configurations minimal:** The `invoke` command initializes *all* resources (sources, tools, etc.) defined in your configuration files during execution. To ensure fast response times, consider using a minimal configuration file containing only the tools you need for the specific invocation.
|
|
||||||
{{< /notice >}}
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
- You have the `toolbox` binary installed or built.
|
|
||||||
- You have a valid tool configuration file (e.g., `tools.yaml`).
|
|
||||||
|
|
||||||
## Basic Usage
|
|
||||||
|
|
||||||
The basic syntax for the command is:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox [--tools-file <path> | --prebuilt <name>] invoke <tool-name> [params]
|
|
||||||
```
|
|
||||||
|
|
||||||
- `<tool-name>`: The name of the tool you want to call. This must match the name defined in your `tools.yaml`.
|
|
||||||
- `[params]`: (Optional) A JSON string representing the arguments for the tool.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
### 1. Calling a Tool without Parameters
|
|
||||||
|
|
||||||
If your tool takes no parameters, simply provide the tool name:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke my-simple-tool
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Calling a Tool with Parameters
|
|
||||||
|
|
||||||
For tools that require arguments, pass them as a JSON string. Ensure you escape quotes correctly for your shell.
|
|
||||||
|
|
||||||
**Example: A tool that takes parameters**
|
|
||||||
|
|
||||||
Assuming a tool named `mytool` taking `a` and `b`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke mytool '{"a": 10, "b": 20}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Example: A tool that queries a database**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --tools-file tools.yaml invoke db-query '{"sql": "SELECT * FROM users LIMIT 5"}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Using Prebuilt Configurations
|
|
||||||
|
|
||||||
You can also use the `--prebuilt` flag to load prebuilt toolsets.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox --prebuilt cloudsql-postgres invoke cloudsql-postgres-list-instances
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
- **Tool not found:** Ensure the `<tool-name>` matches exactly what is in your YAML file and that the file is correctly loaded via `--tools-file`.
|
|
||||||
- **Invalid parameters:** Double-check your JSON syntax. The error message will usually indicate if the JSON parsing failed or if the parameters didn't match the tool's schema.
|
|
||||||
- **Auth errors:** The `invoke` command currently does not support flows requiring client-side authorization (like OAuth flow initiation via the CLI). It works best for tools using service-side authentication (e.g., Application Default Credentials).
|
|
||||||
@@ -16,7 +16,7 @@ description: >
|
|||||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||||
| | `--prebuilt` | Use one or more prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||||
@@ -27,24 +27,8 @@ description: >
|
|||||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
|
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
|
||||||
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
||||||
| | `--user-agent-metadata` | Appends additional metadata to the User-Agent. | |
|
|
||||||
| `-v` | `--version` | version for toolbox | |
|
| `-v` | `--version` | version for toolbox | |
|
||||||
|
|
||||||
## Sub Commands
|
|
||||||
|
|
||||||
### `invoke`
|
|
||||||
|
|
||||||
Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.
|
|
||||||
|
|
||||||
**Syntax:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
toolbox invoke <tool-name> [params]
|
|
||||||
```
|
|
||||||
|
|
||||||
- `<tool-name>`: The name of the tool to execute (as defined in your configuration).
|
|
||||||
- `[params]`: (Optional) A JSON string containing the parameters for the tool.
|
|
||||||
|
|
||||||
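For instance, invoking a hypothetical tool named `mytool` that takes parameters `a` and `b` from a local configuration file:

```bash
toolbox --tools-file tools.yaml invoke mytool '{"a": 10, "b": 20}'
```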
## Examples
|
## Examples
|
||||||
|
|
||||||
### Transport Configuration
|
### Transport Configuration
|
||||||
@@ -66,11 +50,6 @@ toolbox invoke <tool-name> [params]
|
|||||||
|
|
||||||
# Server with prebuilt + custom tools configurations
|
# Server with prebuilt + custom tools configurations
|
||||||
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
||||||
|
|
||||||
# Server with multiple prebuilt tools configurations
|
|
||||||
./toolbox --prebuilt alloydb-postgres,alloydb-postgres-admin
|
|
||||||
# OR
|
|
||||||
./toolbox --prebuilt alloydb-postgres --prebuilt alloydb-postgres-admin
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Tool Configuration Sources
|
### Tool Configuration Sources
|
||||||
@@ -91,7 +70,7 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
|
|||||||
|
|
||||||
**Prebuilt Configurations:**
|
**Prebuilt Configurations:**
|
||||||
|
|
||||||
- `--prebuilt`: Use one or more predefined configurations for specific database types (e.g.,
|
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
|
||||||
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
||||||
Reference](prebuilt-tools.md) for allowed values.
|
Reference](prebuilt-tools.md) for allowed values.
|
||||||
|
|
||||||
|
|||||||
@@ -16,9 +16,6 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
|||||||
{{< notice tip >}}
|
{{< notice tip >}}
|
||||||
You can now use `--prebuilt` along with `--tools-file`, `--tools-files`, or
|
You can now use `--prebuilt` along with `--tools-file`, `--tools-files`, or
|
||||||
`--tools-folder` to combine prebuilt configs with custom tools.
|
`--tools-folder` to combine prebuilt configs with custom tools.
|
||||||
|
|
||||||
You can also combine multiple prebuilt configs.
|
|
||||||
|
|
||||||
See [Usage Examples](../reference/cli.md#examples).
|
See [Usage Examples](../reference/cli.md#examples).
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
@@ -100,43 +97,6 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
(timeseries metrics) for queries running in an AlloyDB instance using a
|
(timeseries metrics) for queries running in an AlloyDB instance using a
|
||||||
PromQL query.
|
PromQL query.
|
||||||
|
|
||||||
## AlloyDB Omni
|
|
||||||
|
|
||||||
* `--prebuilt` value: `alloydb-omni`
|
|
||||||
* **Environment Variables:**
|
|
||||||
* `ALLOYDB_OMNI_HOST`: (Optional) The hostname or IP address (Default: localhost).
|
|
||||||
* `ALLOYDB_OMNI_PORT`: (Optional) The port number (Default: 5432).
|
|
||||||
* `ALLOYDB_OMNI_DATABASE`: The name of the database to connect to.
|
|
||||||
* `ALLOYDB_OMNI_USER`: The database username.
|
|
||||||
* `ALLOYDB_OMNI_PASSWORD`: (Optional) The password for the database user.
|
|
||||||
* `ALLOYDB_OMNI_QUERY_PARAMS`: (Optional) Connection query parameters.
|
|
||||||
* **Tools:**
|
|
||||||
* `execute_sql`: Executes a SQL query.
|
|
||||||
* `list_tables`: Lists tables in the database.
|
|
||||||
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
|
|
||||||
database.
|
|
||||||
* `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations.
|
|
||||||
* `list_columnar_recommended_columns`: Lists columns that AlloyDB Omni recommends adding to the columnar engine.
|
|
||||||
* `list_memory_configurations`: Lists memory-related configurations in the
|
|
||||||
database.
|
|
||||||
* `list_top_bloated_tables`: List top bloated tables in the database.
|
|
||||||
* `list_replication_slots`: Lists replication slots in the database.
|
|
||||||
* `list_invalid_indexes`: Lists invalid indexes in the database.
|
|
||||||
* `get_query_plan`: Generate the execution plan of a statement.
|
|
||||||
* `list_views`: Lists views in the database from pg_views with a default
|
|
||||||
limit of 50 rows. Returns schemaname, viewname and the ownername.
|
|
||||||
* `list_schemas`: Lists schemas in the database.
|
|
||||||
* `database_overview`: Fetches the current state of the PostgreSQL server.
|
|
||||||
* `list_triggers`: Lists triggers in the database.
|
|
||||||
* `list_indexes`: List available user indexes in a PostgreSQL database.
|
|
||||||
* `list_sequences`: List sequences in a PostgreSQL database.
|
|
||||||
* `list_publication_tables`: List publication tables in a PostgreSQL database.
|
|
||||||
* `list_tablespaces`: Lists tablespaces in the database.
|
|
||||||
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
|
|
||||||
* `list_database_stats`: Lists the key performance and activity statistics for
|
|
||||||
each database in the AlloyDB instance.
|
|
||||||
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
|
|
||||||
|
|
||||||
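As an illustration of the environment variables listed above (the values are placeholders), starting Toolbox with this prebuilt configuration might look like:

```bash
# Connection details are read from the ALLOYDB_OMNI_* environment variables
export ALLOYDB_OMNI_DATABASE=mydb
export ALLOYDB_OMNI_USER=postgres
export ALLOYDB_OMNI_PASSWORD=my-password
./toolbox --prebuilt alloydb-omni
```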
## BigQuery

* `--prebuilt` value: `bigquery`

@@ -234,7 +194,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`

* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.

@@ -246,7 +205,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Cloud SQL for PostgreSQL

@@ -326,7 +284,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.

@@ -337,7 +294,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Cloud SQL for SQL Server

@@ -391,7 +347,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.

@@ -402,7 +357,6 @@ See [Usage Examples](../reference/cli.md#examples).

* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Dataplex

@@ -3,14 +3,13 @@ title: "EmbeddingModels"

type: docs
weight: 2
description: >
  EmbeddingModels represent services that transform text into vector embeddings
  for semantic search.
---

EmbeddingModels represent services that generate vector representations of text
data. In the MCP Toolbox, these models enable **Semantic Queries**, allowing
[Tools](../tools/) to automatically convert human-readable text into numerical
vectors before using them in a query.

This is primarily used in two scenarios:

@@ -20,33 +19,14 @@ This is primarily used in two scenarios:

- **Semantic Search**: Converting a natural language query into a vector to
  perform similarity searches.

## Hidden Parameter Duplication (valueFromParam)

When building tools for vector ingestion, you often need the same input string
twice:

1. To store the original text in a TEXT column.
1. To generate the vector embedding for a VECTOR column.

Requesting an Agent (LLM) to output the exact same string twice is inefficient
and error-prone. The `valueFromParam` field solves this by allowing a parameter
to inherit its value from another parameter in the same tool.

### Key Behaviors

1. Hidden from Manifest: Parameters with `valueFromParam` set are excluded from
   the tool definition sent to the Agent. The Agent does not know this parameter
   exists.
1. Auto-Filled: When the tool is executed, the Toolbox automatically copies the
   value from the referenced parameter before processing embeddings (see the
   sketch below).

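To make the behavior concrete, here is a minimal sketch of what the Agent sees versus what the Toolbox executes. It assumes the `content` and `vector_string` parameters from the example that follows; the tool name `insert_document` is only illustrative.

```yaml
# Manifest sent to the Agent: only 'content' is advertised.
#   insert_document(content: string)
#
# Call made by the Agent:
#   content: "AlloyDB supports vector search."
#
# Effective arguments after the Toolbox auto-fills the hidden parameter:
#   content: "AlloyDB supports vector search."
#   vector_string: "AlloyDB supports vector search."  # then embedded via 'gemini-model'
```
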
## Example

The following configuration defines an embedding model and applies it to
specific tool parameters.

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your API keys into the configuration file.
{{< /notice >}}

### Step 1 - Define an Embedding Model

@@ -55,7 +35,7 @@ Define an embedding model in the `embeddingModels` section:

```yaml
kind: embeddingModels
name: gemini-model # Name of the embedding model
type: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
@@ -65,7 +45,8 @@ dimension: 768

### Step 2 - Embed Tool Parameters

Using the defined embedding model, embed your query parameters with the
`embeddedBy` field. Only string-typed parameters can be embedded:

```yaml
# Vector ingestion tool
@@ -79,13 +60,10 @@ statement: |
parameters:
  - name: content
    type: string
    description: The raw text content to be stored in the database.
  - name: vector_string
    type: string
    # This parameter is hidden from the LLM.
    # It automatically copies the value from 'content' and embeds it.
    valueFromParam: content
    embeddedBy: gemini-model
---
# Semantic search tool
kind: tools
@@ -20,10 +20,10 @@ kind: prompts

name: code_review
description: "Asks the LLM to analyze code quality and suggest improvements."
messages:
  - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
arguments:
  - name: "code"
    description: "The code to review"
```

## Prompt Schema

@@ -27,7 +27,7 @@ Authentication can be handled in two ways:

```yaml
kind: sources
name: my-alloydb-admin
type: alloydb-admin
---
kind: sources
name: my-oauth-alloydb-admin
@@ -1,71 +0,0 @@

---
title: "Cloud Logging Admin"
type: docs
weight: 1
description: >
  The Cloud Logging Admin source enables tools to interact with the Cloud Logging API, allowing for the retrieval of log names, monitored resource types, and the querying of log data.
---

## About

The Cloud Logging Admin source provides a client to interact with the [Google
Cloud Logging API](https://cloud.google.com/logging/docs). This allows tools to list log names, monitored resource types, and query log entries.

Authentication can be handled in two ways:

1. **Application Default Credentials (ADC):** By default, the source uses ADC
   to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
   expect an OAuth 2.0 access token to be provided by the client (e.g., a web
   browser) for each request.

## Available Tools

- [`cloud-logging-admin-list-log-names`](../tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md)
  Lists the log names in the project.

- [`cloud-logging-admin-list-resource-types`](../tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md)
  Lists the monitored resource types.

- [`cloud-logging-admin-query-logs`](../tools/cloudloggingadmin/cloud-logging-admin-query-logs.md)
  Queries log entries.

## Example

Initialize a Cloud Logging Admin source that uses ADC:

```yaml
kind: sources
name: my-cloud-logging
type: cloud-logging-admin
project: my-project-id
```

Initialize a Cloud Logging Admin source that uses client-side OAuth:

```yaml
kind: sources
name: my-oauth-cloud-logging
type: cloud-logging-admin
project: my-project-id
useClientOAuth: true
```

Initialize a Cloud Logging Admin source that uses service account impersonation:

```yaml
kind: sources
name: my-impersonated-cloud-logging
type: cloud-logging-admin
project: my-project-id
impersonateServiceAccount: "my-service-account@my-project.iam.gserviceaccount.com"
```

## Reference

| **field** | **type** | **required** | **description** |
|-----------------------------|:--------:|:------------:|-----------------|
| type | string | true | Must be "cloud-logging-admin". |
| project | string | true | ID of the GCP project. |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. Cannot be used with `impersonateServiceAccount`. |
| impersonateServiceAccount | string | false | The service account to impersonate for API calls. Cannot be used with `useClientOAuth`. |
@@ -7,17 +7,6 @@ description: >

---

{{< notice note >}}
**⚠️ Best Effort Maintenance**

This integration is maintained on a best-effort basis by the project
team/community. While we strive to address issues and provide workarounds when
resources are available, there are no guaranteed response times or code fixes.

The automated integration tests for this module are currently non-functional or
failing.
{{< /notice >}}

## About

[Dgraph][dgraph-docs] is an open-source graph database. It is designed for
@@ -12,9 +12,6 @@ aliases:

The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).

> [!NOTE]
> Only `alloydb`, `spannerReference`, and `cloudSqlReference` are supported as [datasource references](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1beta/projects.locations.dataAgents#DatasourceReferences).

## Example

```yaml
@@ -44,13 +41,13 @@ generationOptions:

### Usage Flow

When using this tool, a `query` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.

The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).

See the [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.

**Example Input Query:**

```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
@@ -1,39 +0,0 @@

---
title: "cloud-logging-admin-list-log-names"
type: docs
description: >
  A "cloud-logging-admin-list-log-names" tool lists the log names in the project.
aliases:
- /resources/tools/cloud-logging-admin-list-log-names
---

## About

The `cloud-logging-admin-list-log-names` tool lists the log names available in the Google Cloud project.
It's compatible with the following sources:

- [cloud-logging-admin](../../sources/cloud-logging-admin.md)

## Example

```yaml
kind: tools
name: list_log_names
type: cloud-logging-admin-list-log-names
source: my-cloud-logging
description: Lists all log names in the project.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-list-log-names". |
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |

### Parameters

| **parameter** | **type** | **required** | **description** |
|:--------------|:--------:|:------------:|:----------------|
| limit | integer | false | Maximum number of log entries to return (default: 200). |
@@ -1,34 +0,0 @@

---
title: "cloud-logging-admin-list-resource-types"
type: docs
description: >
  A "cloud-logging-admin-list-resource-types" tool lists the monitored resource types.
aliases:
- /resources/tools/cloud-logging-admin-list-resource-types
---

## About

The `cloud-logging-admin-list-resource-types` tool lists the monitored resource types available in Google Cloud Logging.
It's compatible with the following sources:

- [cloud-logging-admin](../../sources/cloud-logging-admin.md)

## Example

```yaml
kind: tools
name: list_resource_types
type: cloud-logging-admin-list-resource-types
source: my-cloud-logging
description: Lists monitored resource types.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-list-resource-types". |
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -1,44 +0,0 @@

---
title: "cloud-logging-admin-query-logs"
type: docs
description: >
  A "cloud-logging-admin-query-logs" tool queries log entries.
aliases:
- /resources/tools/cloud-logging-admin-query-logs
---

## About

The `cloud-logging-admin-query-logs` tool allows you to query log entries from Google Cloud Logging using the advanced logs filter syntax.
It's compatible with the following sources:

- [cloud-logging-admin](../../sources/cloud-logging-admin.md)

## Example

```yaml
kind: tools
name: query_logs
type: cloud-logging-admin-query-logs
source: my-cloud-logging
description: Queries log entries from Cloud Logging.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| type | string | true | Must be "cloud-logging-admin-query-logs". |
| source | string | true | Name of the cloud-logging-admin source. |
| description | string | true | Description of the tool that is passed to the LLM. |

### Parameters

| **parameter** | **type** | **required** | **description** |
|:--------------|:--------:|:------------:|:----------------|
| filter | string | false | Cloud Logging filter query. Common fields: resource.type, resource.labels.*, logName, severity, textPayload, jsonPayload.*, protoPayload.*, labels.*, httpRequest.*. Operators: =, !=, <, <=, >, >=, :, =~, AND, OR, NOT. |
| newestFirst | boolean | false | Set to true for newest logs first. Defaults to oldest first. |
| startTime | string | false | Start time in RFC3339 format (e.g., 2025-12-09T00:00:00Z). Defaults to 30 days ago. |
| endTime | string | false | End time in RFC3339 format (e.g., 2025-12-09T23:59:59Z). Defaults to now. |
| verbose | boolean | false | Include additional fields (insertId, trace, spanId, httpRequest, labels, operation, sourceLocation). Defaults to false. |
| limit | integer | false | Maximum number of log entries to return. Default: `200`. |
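As a rough illustration of how these parameters compose, an agent might invoke the tool with arguments like the following; the filter value, timestamps, and limit are only examples, not required settings:

```yaml
filter: resource.type="cloud_run_revision" AND severity>=ERROR
startTime: "2025-12-01T00:00:00Z"
endTime: "2025-12-09T23:59:59Z"
newestFirst: true
limit: 50
```
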
@@ -1,53 +0,0 @@

---
title: cloud-sql-restore-backup
type: docs
weight: 10
description: "Restores a backup of a Cloud SQL instance."
---

The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API.

{{< notice info >}}
This tool uses a `source` of type `cloud-sql-admin`.
{{< /notice >}}

## Examples

Basic backup restore:

```yaml
kind: tools
name: backup-restore-basic
type: cloud-sql-restore-backup
source: cloud-sql-admin-source
description: "Restores a backup onto the given Cloud SQL instance."
```

## Reference

### Tool Configuration

| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------ |
| type | string | true | Must be "cloud-sql-restore-backup". |
| source | string | true | The name of the `cloud-sql-admin` source to use. |
| description | string | false | A description of the tool. |

### Tool Inputs

| **parameter** | **type** | **required** | **description** |
| ------------------| :------: | :----------: | -----------------------------------------------------------------------------|
| target_project | string | true | The project ID of the instance to restore the backup onto. |
| target_instance | string | true | The instance to restore the backup onto. Does not include the project ID. |
| backup_id | string | true | The identifier of the backup being restored. |
| source_project | string | false | (Optional) The project ID of the instance that the backup belongs to. |
| source_instance | string | false | (Optional) Cloud SQL instance ID of the instance that the backup belongs to. |

## Usage Notes

- The `backup_id` field can be a BackupRun ID (which will be an int64), backup name, or BackupDR backup name.
- If the `backup_id` field contains a BackupRun ID (i.e. an int64), the optional fields `source_project` and `source_instance` must also be provided.

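For example, restoring by BackupRun ID would need the source fields as well; all values below are placeholders, not real resource names:

```yaml
target_project: my-target-project
target_instance: my-target-instance
backup_id: "1234567890"          # a BackupRun ID (int64), so the source fields are required
source_project: my-source-project
source_instance: my-source-instance
```
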
## See Also

- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Restore API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/restoring)
@@ -9,17 +9,6 @@ aliases:

- /resources/tools/dgraph-dql
---

{{< notice note >}}
**⚠️ Best Effort Maintenance**

This integration is maintained on a best-effort basis by the project
team/community. While we strive to address issues and provide workarounds when
resources are available, there are no guaranteed response times or code fixes.

The automated integration tests for this module are currently non-functional or
failing.
{{< /notice >}}

## About

A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph
@@ -18,7 +18,7 @@ with filters, ordering, and limit capabilities.

To use this tool, you need to configure it in your YAML configuration file:

```yaml
kind: sources
name: my-firestore
type: firestore
project: my-gcp-project
@@ -30,10 +30,6 @@ following config for example:

- name: userNames
  type: array
  description: The user names to be set.
  items:
    name: userName # the item name doesn't matter but it has to exist
    type: string
    description: username
```

If the input is an array of strings `["Alice", "Sid", "Bob"]`, the final command
|||||||
@@ -771,7 +771,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.26.0\" # x-release-please-version\n",
|
"version = \"0.25.0\" # x-release-please-version\n",
|
||||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
|
|||||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
export VERSION="0.26.0"
|
export VERSION="0.25.0"
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -220,7 +220,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.26.0\" # x-release-please-version\n",
|
"version = \"0.25.0\" # x-release-please-version\n",
|
||||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
|
|||||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
@@ -335,7 +335,7 @@ pip install toolbox-llamaindex
|
|||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install google-adk[toolbox]
|
pip install google-adk
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
@@ -375,7 +375,7 @@ pip install llama-index-llms-google-genai
|
|||||||
|
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
# No other dependencies required for ADK
|
pip install toolbox-core
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
|
|
||||||
@@ -617,8 +617,8 @@ from google.adk.agents import Agent
|
|||||||
from google.adk.runners import Runner
|
from google.adk.runners import Runner
|
||||||
from google.adk.sessions import InMemorySessionService
|
from google.adk.sessions import InMemorySessionService
|
||||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||||
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
|
||||||
from google.genai import types # For constructing message content
|
from google.genai import types # For constructing message content
|
||||||
|
from toolbox_core import ToolboxSyncClient
|
||||||
|
|
||||||
import os
|
import os
|
||||||
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
|
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
|
||||||
@@ -633,47 +633,48 @@ os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1'
|
|||||||
|
|
||||||
# --- Load Tools from Toolbox ---
|
# --- Load Tools from Toolbox ---
|
||||||
|
|
||||||
# TODO(developer): Ensure the Toolbox server is running at http://127.0.0.1:5000
|
# TODO(developer): Ensure the Toolbox server is running at <http://127.0.0.1:5000>
|
||||||
toolset = ToolboxToolset(server_url="http://127.0.0.1:5000")
|
|
||||||
|
|
||||||
# --- Define the Agent's Prompt ---
|
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
||||||
prompt = """
|
# TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server.
|
||||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
agent_toolset = toolbox_client.load_toolset("my-toolset")
|
||||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
|
||||||
location and price tier. Always mention hotel ids while performing any
|
|
||||||
searches. This is very important for any operations. For any bookings or
|
|
||||||
cancellations, please provide the appropriate confirmation. Be sure to
|
|
||||||
update checkin or checkout dates if mentioned by the user.
|
|
||||||
Don't ask for confirmations from the user.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# --- Configure the Agent ---
|
# --- Define the Agent's Prompt ---
|
||||||
|
prompt = """
|
||||||
|
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||||
|
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||||
|
location and price tier. Always mention hotel ids while performing any
|
||||||
|
searches. This is very important for any operations. For any bookings or
|
||||||
|
cancellations, please provide the appropriate confirmation. Be sure to
|
||||||
|
update checkin or checkout dates if mentioned by the user.
|
||||||
|
Don't ask for confirmations from the user.
|
||||||
|
"""
|
||||||
|
|
||||||
root_agent = Agent(
|
# --- Configure the Agent ---
|
||||||
model='gemini-2.0-flash-001',
|
|
||||||
name='hotel_agent',
|
|
||||||
description='A helpful AI assistant that can search and book hotels.',
|
|
||||||
instruction=prompt,
|
|
||||||
tools=[toolset], # Pass the loaded toolset
|
|
||||||
)
|
|
||||||
|
|
||||||
# --- Initialize Services for Running the Agent ---
|
root_agent = Agent(
|
||||||
session_service = InMemorySessionService()
|
model='gemini-2.0-flash-001',
|
||||||
artifacts_service = InMemoryArtifactService()
|
name='hotel_agent',
|
||||||
|
description='A helpful AI assistant that can search and book hotels.',
|
||||||
|
instruction=prompt,
|
||||||
|
tools=agent_toolset, # Pass the loaded toolset
|
||||||
|
)
|
||||||
|
|
||||||
runner = Runner(
|
# --- Initialize Services for Running the Agent ---
|
||||||
app_name='hotel_agent',
|
session_service = InMemorySessionService()
|
||||||
agent=root_agent,
|
artifacts_service = InMemoryArtifactService()
|
||||||
artifact_service=artifacts_service,
|
|
||||||
session_service=session_service,
|
|
||||||
)
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
# Create a new session for the interaction.
|
# Create a new session for the interaction.
|
||||||
session = await session_service.create_session(
|
session = session_service.create_session(
|
||||||
state={}, app_name='hotel_agent', user_id='123'
|
state={}, app_name='hotel_agent', user_id='123'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
runner = Runner(
|
||||||
|
app_name='hotel_agent',
|
||||||
|
agent=root_agent,
|
||||||
|
artifact_service=artifacts_service,
|
||||||
|
session_service=session_service,
|
||||||
|
)
|
||||||
|
|
||||||
# --- Define Queries and Run the Agent ---
|
# --- Define Queries and Run the Agent ---
|
||||||
queries = [
|
queries = [
|
||||||
"Find hotels in Basel with Basel in it's name.",
|
"Find hotels in Basel with Basel in it's name.",
|
||||||
@@ -696,10 +697,6 @@ async def main():
|
|||||||
|
|
||||||
for text in responses:
|
for text in responses:
|
||||||
print(text)
|
print(text)
|
||||||
|
|
||||||
import asyncio
|
|
||||||
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
|
|
||||||
|
|||||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -1,47 +0,0 @@

---
title: "Pre and Post processing"
type: docs
weight: 1
description: >
  Pre and Post processing in GenAI applications.
---

Pre and post processing allow developers to intercept and modify interactions between the agent and its tools or the user.

> **Note**: These capabilities are typically features of **orchestration frameworks** (like LangChain, LangGraph, or Agent Builder) rather than the Toolbox SDK itself. However, Toolbox tools are designed to fully leverage these framework capabilities to support robust, secure, and compliant agent architectures.

## Types of Processing

### Pre-processing

Pre-processing occurs before a tool is executed or an agent processes a message. Key types include:

- **Input Sanitization & Redaction**: Detecting and masking sensitive information (like PII) in user queries or tool arguments to prevent it from being logged or sent to unauthorized systems.
- **Business Logic Validation**: Verifying that the proposed action complies with business rules (e.g., ensuring a requested hotel stay does not exceed 14 days, or checking if a user has sufficient permission).
- **Security Guardrails**: Analyzing inputs for potential prompt injection attacks or malicious payloads.

### Post-processing

Post-processing occurs after a tool has executed or the model has generated a response. Key types include (a combined sketch of both hook styles follows this list):

- **Response Enrichment**: Injecting additional data into the tool output that wasn't part of the raw API response (e.g., calculating loyalty points earned based on the booking value).
- **Output Formatting**: Transforming raw data (like JSON or XML) into a more human-readable or model-friendly format to improve the agent's understanding.
- **Compliance Auditing**: Logging the final outcome of transactions, including the original request and the result, to a secure audit trail.

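The LangChain sample that accompanies this guide wires such hooks in with `wrap_tool_call` middleware passed to `create_agent(..., middleware=[...])`. The following is a trimmed sketch of that pattern, not a drop-in implementation: the email-redaction rule is illustrative, and the in-place mutation of the tool-call arguments is an assumption about the request object rather than documented behavior.

```py
import re

from langchain.agents.middleware import wrap_tool_call
from langchain_core.messages import ToolMessage


# Pre-processing: mask email addresses in tool arguments before execution.
@wrap_tool_call
async def redact_pii(request, handler):
    for key, value in request.tool_call["args"].items():
        if isinstance(value, str):
            request.tool_call["args"][key] = re.sub(
                r"[\w.+-]+@[\w-]+\.[\w.]+", "[REDACTED_EMAIL]", value
            )
    return await handler(request)


# Post-processing: append loyalty information to successful bookings.
@wrap_tool_call
async def add_loyalty_note(request, handler):
    result = await handler(request)
    if isinstance(result, ToolMessage) and request.tool_call["name"] == "book-hotel":
        result.content = f"{result.content}\nYou earned 500 loyalty points."
    return result
```

The full, runnable version of this pattern is included in the LangChain example later in this guide.
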
## Processing Scopes

While processing logic can be applied at various levels (Agent, Model, Tool), this guide primarily focuses on **Tool Level** processing, which is most relevant for granular control over tool execution.

### Tool Level (Primary Focus)

Wraps individual tool executions. This is best for logic specific to a single tool or a set of tools.

- **Scope**: Intercepts the raw inputs (arguments) to a tool and its outputs.
- **Use Cases**: Argument validation, output formatting, specific privacy rules for sensitive tools.

### Comparison with Other Levels

It is helpful to understand how tool-level processing differs from other scopes:

- **Model Level**: Intercepts individual calls to the LLM (prompts and responses). Unlike tool-level, this applies globally to all text sent/received, making it better for global PII redaction or token tracking.
- **Agent Level**: Wraps the high-level execution loop (e.g., a "turn" in the conversation). Unlike tool-level, this envelopes the entire turn (user input to final response), making it suitable for session management or end-to-end auditing.
@@ -1,5 +0,0 @@

Final Client Response:
AI:
Booking Confirmed!
Loyalty Points
POLICY CHECK: Intercepting 'book-hotel'
@@ -1,31 +0,0 @@

---
title: "(Python) Pre and post processing"
type: docs
weight: 4
description: >
  How to add pre and post processing to your Python toolbox applications.
---

## Prerequisites

This tutorial assumes that you have set up a basic toolbox application as described in the [local quickstart](../../getting-started/local_quickstart).

This guide demonstrates how to implement these patterns in your Toolbox applications.

## Python

{{< tabpane persist=header >}}
{{% tab header="ADK" text=true %}}
Coming soon.
{{% /tab %}}
{{% tab header="Langchain" text=true %}}
The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre and post processing for tool calls.

```py
{{< include "python/langchain/agent.py" >}}
```

For more information, see the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks).
You can also add model-level (`wrap_model`) and agent-level (`before_agent`, `after_agent`) hooks to intercept messages at different stages of the execution loop; the same documentation covers these additional hook types.
{{% /tab %}}
{{< /tabpane >}}
@@ -1,4 +0,0 @@

# This file makes the 'pre_post_processing/python' directory a Python package.

# You can include any package-level initialization logic here if needed.
# For now, this file is empty.
@@ -1,58 +0,0 @@
|
|||||||
# Copyright 2026 Google LLC
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import importlib
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
ORCH_NAME = os.environ.get("ORCH_NAME")
|
|
||||||
module_path = f"python.{ORCH_NAME}.agent"
|
|
||||||
agent = importlib.import_module(module_path)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="module")
|
|
||||||
def golden_keywords():
|
|
||||||
"""Loads expected keywords from the golden.txt file."""
|
|
||||||
golden_file_path = Path(__file__).resolve().parent.parent / "golden.txt"
|
|
||||||
if not golden_file_path.exists():
|
|
||||||
pytest.fail(f"Golden file not found: {golden_file_path}")
|
|
||||||
try:
|
|
||||||
with open(golden_file_path, "r") as f:
|
|
||||||
return [line.strip() for line in f.readlines() if line.strip()]
|
|
||||||
except Exception as e:
|
|
||||||
pytest.fail(f"Could not read golden.txt: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
# --- Execution Tests ---
|
|
||||||
class TestExecution:
|
|
||||||
"""Test framework execution and output validation."""
|
|
||||||
|
|
||||||
@pytest.fixture(scope="function")
|
|
||||||
def script_output(self, capsys):
|
|
||||||
"""Run the agent function and return its output."""
|
|
||||||
asyncio.run(agent.main())
|
|
||||||
return capsys.readouterr()
|
|
||||||
|
|
||||||
def test_script_runs_without_errors(self, script_output):
|
|
||||||
"""Test that the script runs and produces no stderr."""
|
|
||||||
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
|
||||||
|
|
||||||
def test_keywords_in_output(self, script_output, golden_keywords):
|
|
||||||
"""Test that expected keywords are present in the script's output."""
|
|
||||||
output = script_output.out
|
|
||||||
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
|
||||||
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
# Copyright 2026 Google LLC
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from langchain.agents import create_agent
|
|
||||||
from langchain.agents.middleware import wrap_tool_call
|
|
||||||
from langchain_core.messages import ToolMessage
|
|
||||||
from langchain_google_vertexai import ChatVertexAI
|
|
||||||
from toolbox_langchain import ToolboxClient
|
|
||||||
|
|
||||||
system_prompt = """
|
|
||||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
|
||||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
|
||||||
location and price tier. Always mention hotel ids while performing any
|
|
||||||
searches. This is very important for any operations. For any bookings or
|
|
||||||
cancellations, please provide the appropriate confirmation. Be sure to
|
|
||||||
update checkin or checkout dates if mentioned by the user.
|
|
||||||
Don't ask for confirmations from the user.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
# Pre processing
|
|
||||||
@wrap_tool_call
|
|
||||||
async def enforce_business_rules(request, handler):
|
|
||||||
"""
|
|
||||||
Business Logic Validation:
|
|
||||||
Enforces max stay duration (e.g., max 14 days).
|
|
||||||
"""
|
|
||||||
tool_call = request.tool_call
|
|
||||||
name = tool_call["name"]
|
|
||||||
args = tool_call["args"]
|
|
||||||
|
|
||||||
print(f"POLICY CHECK: Intercepting '{name}'")
|
|
||||||
|
|
||||||
if name == "update-hotel":
|
|
||||||
if "checkin_date" in args and "checkout_date" in args:
|
|
||||||
try:
|
|
||||||
start = datetime.fromisoformat(args["checkin_date"])
|
|
||||||
end = datetime.fromisoformat(args["checkout_date"])
|
|
||||||
duration = (end - start).days
|
|
||||||
|
|
||||||
if duration > 14:
|
|
||||||
print("BLOCKED: Stay too long")
|
|
||||||
return ToolMessage(
|
|
||||||
content="Error: Maximum stay duration is 14 days.",
|
|
||||||
tool_call_id=tool_call["id"],
|
|
||||||
)
|
|
||||||
except ValueError:
|
|
||||||
pass # Ignore invalid date formats
|
|
||||||
|
|
||||||
return await handler(request)
|
|
||||||
|
|
||||||
|
|
||||||
# Post processing
|
|
||||||
@wrap_tool_call
|
|
||||||
async def enrich_response(request, handler):
|
|
||||||
"""
|
|
||||||
Post-Processing & Enrichment:
|
|
||||||
Adds loyalty points information to successful bookings.
|
|
||||||
Standardizes output format.
|
|
||||||
"""
|
|
||||||
result = await handler(request)
|
|
||||||
|
|
||||||
if isinstance(result, ToolMessage):
|
|
||||||
content = str(result.content)
|
|
||||||
tool_name = request.tool_call["name"]
|
|
||||||
|
|
||||||
if tool_name == "book-hotel" and "Error" not in content:
|
|
||||||
loyalty_bonus = 500
|
|
||||||
result.content = f"Booking Confirmed! \n You earned {loyalty_bonus} Loyalty Points with this stay.\n\nSystem Details: {content}"
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
|
||||||
tools = await client.aload_toolset("my-toolset")
|
|
||||||
model = ChatVertexAI(model="gemini-2.5-flash")
|
|
||||||
agent = create_agent(
|
|
||||||
system_prompt=system_prompt,
|
|
||||||
model=model,
|
|
||||||
tools=tools,
|
|
||||||
middleware=[enforce_business_rules, enrich_response],
|
|
||||||
)
|
|
||||||
|
|
||||||
user_input = "Book hotel with id 3."
|
|
||||||
response = await agent.ainvoke(
|
|
||||||
{"messages": [{"role": "user", "content": user_input}]}
|
|
||||||
)
|
|
||||||
|
|
||||||
print("-" * 50)
|
|
||||||
print("Final Client Response:")
|
|
||||||
last_ai_msg = response["messages"][-1].content
|
|
||||||
print(f"AI: {last_ai_msg}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
langchain==1.2.6
|
|
||||||
toolbox-langchain==0.5.7
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "mcp-toolbox-for-databases",
|
"name": "mcp-toolbox-for-databases",
|
||||||
"version": "0.26.0",
|
"version": "0.25.0",
|
||||||
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
||||||
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
||||||
}
|
}
|
||||||
46
go.mod
46
go.mod
@@ -13,7 +13,6 @@ require (
|
|||||||
cloud.google.com/go/dataproc/v2 v2.15.0
|
cloud.google.com/go/dataproc/v2 v2.15.0
|
||||||
cloud.google.com/go/firestore v1.20.0
|
cloud.google.com/go/firestore v1.20.0
|
||||||
cloud.google.com/go/geminidataanalytics v0.3.0
|
cloud.google.com/go/geminidataanalytics v0.3.0
|
||||||
cloud.google.com/go/logging v1.13.1
|
|
||||||
cloud.google.com/go/longrunning v0.7.0
|
cloud.google.com/go/longrunning v0.7.0
|
||||||
cloud.google.com/go/spanner v1.86.1
|
cloud.google.com/go/spanner v1.86.1
|
||||||
github.com/ClickHouse/clickhouse-go/v2 v2.40.3
|
github.com/ClickHouse/clickhouse-go/v2 v2.40.3
|
||||||
@@ -39,7 +38,7 @@ require (
|
|||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/jackc/pgx/v5 v5.7.6
|
github.com/jackc/pgx/v5 v5.7.6
|
||||||
github.com/jmoiron/sqlx v1.4.0
|
github.com/jmoiron/sqlx v1.4.0
|
||||||
github.com/looker-open-source/sdk-codegen/go v0.25.22
|
github.com/looker-open-source/sdk-codegen/go v0.25.21
|
||||||
github.com/microsoft/go-mssqldb v1.9.3
|
github.com/microsoft/go-mssqldb v1.9.3
|
||||||
github.com/nakagami/firebirdsql v0.9.15
|
github.com/nakagami/firebirdsql v0.9.15
|
||||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
||||||
@@ -47,12 +46,11 @@ require (
|
|||||||
github.com/sijms/go-ora/v2 v2.9.0
|
github.com/sijms/go-ora/v2 v2.9.0
|
||||||
github.com/snowflakedb/gosnowflake v1.18.1
|
github.com/snowflakedb/gosnowflake v1.18.1
|
||||||
github.com/spf13/cobra v1.10.1
|
github.com/spf13/cobra v1.10.1
|
||||||
github.com/testcontainers/testcontainers-go v0.40.0
|
|
||||||
github.com/thlib/go-timezone-local v0.0.7
|
github.com/thlib/go-timezone-local v0.0.7
|
||||||
github.com/trinodb/trino-go-client v0.330.0
|
github.com/trinodb/trino-go-client v0.330.0
|
||||||
github.com/valkey-io/valkey-go v1.0.68
|
github.com/valkey-io/valkey-go v1.0.68
|
||||||
github.com/yugabyte/pgx/v5 v5.5.3-yb-5
|
github.com/yugabyte/pgx/v5 v5.5.3-yb-5
|
||||||
go.mongodb.org/mongo-driver/v2 v2.4.2
|
go.mongodb.org/mongo-driver v1.17.4
|
||||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
|
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
|
||||||
go.opentelemetry.io/otel v1.38.0
|
go.opentelemetry.io/otel v1.38.0
|
||||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0
|
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0
|
||||||
@@ -65,7 +63,6 @@ require (
|
|||||||
google.golang.org/api v0.256.0
|
google.golang.org/api v0.256.0
|
||||||
google.golang.org/genai v1.37.0
|
google.golang.org/genai v1.37.0
|
||||||
google.golang.org/genproto v0.0.0-20251022142026-3a174f9686a8
|
google.golang.org/genproto v0.0.0-20251022142026-3a174f9686a8
|
||||||
google.golang.org/grpc v1.76.0
|
|
||||||
google.golang.org/protobuf v1.36.10
|
google.golang.org/protobuf v1.36.10
|
||||||
modernc.org/sqlite v1.40.0
|
modernc.org/sqlite v1.40.0
|
||||||
)
|
)
|
||||||
@@ -92,19 +89,16 @@ require (
|
|||||||
cloud.google.com/go/iam v1.5.3 // indirect
|
cloud.google.com/go/iam v1.5.3 // indirect
|
||||||
cloud.google.com/go/monitoring v1.24.3 // indirect
|
cloud.google.com/go/monitoring v1.24.3 // indirect
|
||||||
cloud.google.com/go/trace v1.11.7 // indirect
|
cloud.google.com/go/trace v1.11.7 // indirect
|
||||||
dario.cat/mergo v1.0.2 // indirect
|
|
||||||
filippo.io/edwards25519 v1.1.0 // indirect
|
filippo.io/edwards25519 v1.1.0 // indirect
|
||||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
|
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
|
||||||
github.com/99designs/keyring v1.2.2 // indirect
|
github.com/99designs/keyring v1.2.2 // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||||
 	github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect
-	github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
 	github.com/BurntSushi/toml v1.4.0 // indirect
 	github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
 	github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
 	github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
-	github.com/Microsoft/go-winio v0.6.2 // indirect
 	github.com/PuerkitoBio/goquery v1.10.3 // indirect
 	github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
 	github.com/ajg/form v1.5.1 // indirect
@@ -130,29 +124,17 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect
 	github.com/aws/smithy-go v1.23.0 // indirect
-	github.com/cenkalti/backoff/v4 v4.3.0 // indirect
 	github.com/cespare/xxhash/v2 v2.3.0 // indirect
 	github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
-	github.com/containerd/errdefs v1.0.0 // indirect
-	github.com/containerd/errdefs/pkg v0.3.0 // indirect
-	github.com/containerd/log v0.1.0 // indirect
-	github.com/containerd/platforms v0.2.1 // indirect
 	github.com/couchbase/gocbcore/v10 v10.8.1 // indirect
 	github.com/couchbase/gocbcoreps v0.1.4 // indirect
 	github.com/couchbase/goprotostellar v1.0.2 // indirect
 	github.com/couchbase/tools-common/errors v1.0.0 // indirect
 	github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect
-	github.com/cpuguy83/dockercfg v0.3.2 // indirect
 	github.com/danieljoos/wincred v1.2.2 // indirect
-	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
 	github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
-	github.com/distribution/reference v0.6.0 // indirect
-	github.com/docker/docker v28.5.1+incompatible // indirect
-	github.com/docker/go-connections v0.6.0 // indirect
-	github.com/docker/go-units v0.5.0 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/dvsekhvalnov/jose2go v1.7.0 // indirect
-	github.com/ebitengine/purego v0.8.4 // indirect
 	github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
 	github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
 	github.com/felixge/httpsnoop v1.0.4 // indirect
@@ -161,7 +143,6 @@ require (
 	github.com/go-logfmt/logfmt v0.6.0 // indirect
 	github.com/go-logr/logr v1.4.3 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
-	github.com/go-ole/go-ole v1.2.6 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
 	github.com/goccy/go-json v0.10.5 // indirect
@@ -197,46 +178,27 @@ require (
 	github.com/klauspost/compress v1.18.0 // indirect
 	github.com/klauspost/cpuid/v2 v2.2.11 // indirect
 	github.com/leodido/go-urn v1.4.0 // indirect
-	github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
-	github.com/magiconair/properties v1.8.10 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
 	github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
-	github.com/moby/docker-image-spec v1.3.1 // indirect
-	github.com/moby/go-archive v0.1.0 // indirect
-	github.com/moby/patternmatcher v0.6.0 // indirect
-	github.com/moby/sys/sequential v0.6.0 // indirect
-	github.com/moby/sys/user v0.4.0 // indirect
-	github.com/moby/sys/userns v0.1.0 // indirect
-	github.com/moby/term v0.5.2 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
-	github.com/morikuni/aec v1.0.0 // indirect
+	github.com/montanaflynn/stats v0.7.1 // indirect
 	github.com/mtibben/percent v0.2.1 // indirect
 	github.com/nakagami/chacha20 v0.1.0 // indirect
 	github.com/ncruces/go-strftime v0.1.9 // indirect
-	github.com/opencontainers/go-digest v1.0.0 // indirect
-	github.com/opencontainers/image-spec v1.1.1 // indirect
 	github.com/pierrec/lz4 v2.6.1+incompatible // indirect
 	github.com/pierrec/lz4/v4 v4.1.22 // indirect
 	github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
-	github.com/pkg/errors v0.9.1 // indirect
 	github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
-	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
-	github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
-	github.com/shirou/gopsutil/v4 v4.25.6 // indirect
 	github.com/sirupsen/logrus v1.9.3 // indirect
 	github.com/spf13/pflag v1.0.9 // indirect
 	github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
-	github.com/stretchr/testify v1.11.1 // indirect
-	github.com/tklauser/go-sysconf v0.3.12 // indirect
-	github.com/tklauser/numcpus v0.6.1 // indirect
 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
 	github.com/xdg-go/scram v1.1.2 // indirect
 	github.com/xdg-go/stringprep v1.0.4 // indirect
 	github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
-	github.com/yusufpapurcu/wmi v1.2.4 // indirect
 	github.com/zeebo/errs v1.4.0 // indirect
 	github.com/zeebo/xxh3 v1.0.2 // indirect
 	gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b // indirect
@@ -267,9 +229,9 @@ require (
 	golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
 	google.golang.org/genproto/googleapis/api v0.0.0-20251111163417-95abcf5c77ba // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect
+	google.golang.org/grpc v1.76.0 // indirect
 	gopkg.in/inf.v0 v0.9.1 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
-	gopkg.in/yaml.v3 v3.0.1 // indirect
 	modernc.org/libc v1.66.10 // indirect
 	modernc.org/mathutil v1.7.1 // indirect
 	modernc.org/memory v1.11.0 // indirect
go.sum (71 lines changed)
@@ -370,8 +370,8 @@ cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6
 cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo=
 cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw=
 cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M=
-cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY=
-cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw=
+cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc=
+cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA=
 cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE=
 cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc=
 cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo=
@@ -647,8 +647,6 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb
 github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
 github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
 github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
-github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk=
-github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
@@ -802,14 +800,6 @@ github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv
 github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
 github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4=
 github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE=
-github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
-github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
-github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
-github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
-github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
-github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
-github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
-github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
 github.com/couchbase/gocb/v2 v2.11.1 h1:xWDco7Qk/XSvGUjbUWRaXi0V35nsMijJnm4vHXN/rqY=
 github.com/couchbase/gocb/v2 v2.11.1/go.mod h1:aSh1Cmd1sPRpYyiBD5iWPehPWaTVF/oYhrtOAITWb/4=
 github.com/couchbase/gocbcore/v10 v10.8.1 h1:i4SnH0DH9APGC4GS2vS2m+3u08V7oJwviamOXdgAZOQ=
@@ -826,12 +816,8 @@ github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8 h1:MQ
 github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8/go.mod h1:AVekAZwIY2stsJOMWLAS/0uA/+qdp7pjO8EHnl61QkY=
 github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8d5hnglCXXjW78=
 github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E=
-github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
-github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
-github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
 github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
 github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -840,12 +826,10 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
 github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
-github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
-github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
 github.com/docker/cli v28.4.0+incompatible h1:RBcf3Kjw2pMtwui5V0DIMdyeab8glEw5QY0UUU4C9kY=
 github.com/docker/cli v28.4.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
-github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM=
-github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker v28.4.0+incompatible h1:KVC7bz5zJY/4AZe/78BIvCnPsLaC9T/zh72xnlrTTOk=
+github.com/docker/docker v28.4.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
 github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
 github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
 github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
@@ -856,8 +840,6 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
 github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
 github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo=
 github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
-github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
-github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
 github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo=
 github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
 github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs=
@@ -931,8 +913,6 @@ github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
 github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
 github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
 github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
-github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
-github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
 github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
 github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@@ -1192,15 +1172,11 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
 github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
 github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
 github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
-github.com/looker-open-source/sdk-codegen/go v0.25.22 h1:DGYt1v2R2uE/m71sWAvgxsJnDLM9B7C40N5/CTDlE2A=
-github.com/looker-open-source/sdk-codegen/go v0.25.22/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
+github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
+github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
-github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
-github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
 github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
 github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
 github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
-github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
-github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
 github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
 github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
@@ -1218,18 +1194,8 @@ github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8D
 github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
 github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
 github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
-github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ=
-github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo=
-github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
-github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
-github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
-github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs=
-github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
-github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
 github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs=
 github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
-github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
-github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
 github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
 github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -1238,8 +1204,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
 github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
 github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
-github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
-github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
+github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
+github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
 github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
 github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
 github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo=
@@ -1288,8 +1254,6 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
-github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
@@ -1311,8 +1275,6 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF
 github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk=
 github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
 github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
-github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
-github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
 github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
 github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
 github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
@@ -1350,15 +1312,9 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
 github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
 github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
-github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
-github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY=
 github.com/thlib/go-timezone-local v0.0.7 h1:fX8zd3aJydqLlTs/TrROrIIdztzsdFV23OzOQx31jII=
 github.com/thlib/go-timezone-local v0.0.7/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI=
 github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
-github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
-github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
-github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
-github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
 github.com/trinodb/trino-go-client v0.330.0 h1:TBbHjFBuRjYbGtkNyRAJfzLOcwvz8ECihtMtxSzXqOc=
 github.com/trinodb/trino-go-client v0.330.0/go.mod h1:BXj9QNy6pA4Gn8eIu9dVdRhetABCjFAOZ6xxsVsOZJE=
 github.com/valkey-io/valkey-go v1.0.68 h1:bTbfonp49b41DqrF30q+y2JL3gcbjd2IiacFAtO4JBA=
@@ -1391,8 +1347,6 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
-github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
 github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
 github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
 github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
@@ -1402,8 +1356,8 @@ github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaD
 gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b h1:7gd+rd8P3bqcn/96gOZa3F5dpJr/vEiDQYlNb/y2uNs=
 gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE=
 go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
-go.mongodb.org/mongo-driver/v2 v2.4.2 h1:HrJ+Auygxceby9MLp3YITobef5a8Bv4HcPFIkml1U7U=
-go.mongodb.org/mongo-driver/v2 v2.4.2/go.mod h1:jHeEDJHJq7tm6ZF45Issun9dbogjfnPySb1vXA7EeAI=
+go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw=
+go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
 go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
 go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
 go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@@ -1673,7 +1627,6 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -1695,7 +1648,6 @@ golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -1748,7 +1700,6 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
@@ -2178,8 +2129,6 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
-gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -1,161 +0,0 @@
-// Copyright 2026 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package invoke
-
-import (
-	"context"
-	"encoding/json"
-	"fmt"
-	"io"
-
-	"github.com/googleapis/genai-toolbox/internal/log"
-	"github.com/googleapis/genai-toolbox/internal/server"
-	"github.com/googleapis/genai-toolbox/internal/server/resources"
-	"github.com/googleapis/genai-toolbox/internal/util/parameters"
-	"github.com/spf13/cobra"
-)
-
-// RootCommand defines the interface for required by invoke subcommand.
-// This allows subcommands to access shared resources and functionality without
-// direct coupling to the root command's implementation.
-type RootCommand interface {
-	// Config returns a copy of the current server configuration.
-	Config() server.ServerConfig
-
-	// Out returns the writer used for standard output.
-	Out() io.Writer
-
-	// LoadConfig loads and merges the configuration from files, folders, and prebuilts.
-	LoadConfig(ctx context.Context) error
-
-	// Setup initializes the runtime environment, including logging and telemetry.
-	// It returns the updated context and a shutdown function to be called when finished.
-	Setup(ctx context.Context) (context.Context, func(context.Context) error, error)
-
-	// Logger returns the logger instance.
-	Logger() log.Logger
-}
-
-func NewCommand(rootCmd RootCommand) *cobra.Command {
-	cmd := &cobra.Command{
-		Use:   "invoke <tool-name> [params]",
-		Short: "Execute a tool directly",
-		Long: `Execute a tool directly with parameters.
-
-Params must be a JSON string.
-
-Example:
-  toolbox invoke my-tool '{"param1": "value1"}'`,
-		Args: cobra.MinimumNArgs(1),
-		RunE: func(c *cobra.Command, args []string) error {
-			return runInvoke(c, args, rootCmd)
-		},
-	}
-	return cmd
-}
-
-func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
-	ctx, cancel := context.WithCancel(cmd.Context())
-	defer cancel()
-
-	ctx, shutdown, err := rootCmd.Setup(ctx)
-	if err != nil {
-		return err
-	}
-	defer func() {
-		_ = shutdown(ctx)
-	}()
-
-	// Load and merge tool configurations
-	if err := rootCmd.LoadConfig(ctx); err != nil {
-		return err
-	}
-
-	// Initialize Resources
-	sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, rootCmd.Config())
-	if err != nil {
-		errMsg := fmt.Errorf("failed to initialize resources: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
-
-	// Execute Tool
-	toolName := args[0]
-	tool, ok := resourceMgr.GetTool(toolName)
-	if !ok {
-		errMsg := fmt.Errorf("tool %q not found", toolName)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	var paramsInput string
-	if len(args) > 1 {
-		paramsInput = args[1]
-	}
-
-	params := make(map[string]any)
-	if paramsInput != "" {
-		if err := json.Unmarshal([]byte(paramsInput), &params); err != nil {
-			errMsg := fmt.Errorf("params must be a valid JSON string: %w", err)
-			rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
-	}
-
-	parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil)
-	if err != nil {
-		errMsg := fmt.Errorf("invalid parameters: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap())
-	if err != nil {
-		errMsg := fmt.Errorf("error embedding parameters: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	// Client Auth not supported for ephemeral CLI call
-	requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr)
-	if err != nil {
-		errMsg := fmt.Errorf("failed to check auth requirements: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-	if requiresAuth {
-		errMsg := fmt.Errorf("client authorization is not supported")
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "")
-	if err != nil {
-		errMsg := fmt.Errorf("tool execution failed: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-
-	// Print Result
-	output, err := json.MarshalIndent(result, "", " ")
-	if err != nil {
-		errMsg := fmt.Errorf("failed to marshal result: %w", err)
-		rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
-		return errMsg
-	}
-	fmt.Fprintln(rootCmd.Out(), string(output))
-
-	return nil
-}
@@ -22,18 +22,6 @@ import (
 	"strings"
 )
 
-// NewLogger creates a new logger based on the provided format and level.
-func NewLogger(format, level string, out, err io.Writer) (Logger, error) {
-	switch strings.ToLower(format) {
-	case "json":
-		return NewStructuredLogger(out, err, level)
-	case "standard":
-		return NewStdLogger(out, err, level)
-	default:
-		return nil, fmt.Errorf("logging format invalid: %s", format)
-	}
-}
-
 // StdLogger is the standard logger
 type StdLogger struct {
 	outLogger *slog.Logger
@@ -21,7 +21,6 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var expectedToolSources = []string{
|
var expectedToolSources = []string{
|
||||||
"alloydb-omni",
|
|
||||||
"alloydb-postgres-admin",
|
"alloydb-postgres-admin",
|
||||||
"alloydb-postgres-observability",
|
"alloydb-postgres-observability",
|
||||||
"alloydb-postgres",
|
"alloydb-postgres",
|
||||||
@@ -100,40 +99,36 @@ func TestLoadPrebuiltToolYAMLs(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestGetPrebuiltTool(t *testing.T) {
|
func TestGetPrebuiltTool(t *testing.T) {
|
||||||
alloydb_omni_config := getOrFatal(t, "alloydb-omni")
|
alloydb_admin_config, _ := Get("alloydb-postgres-admin")
|
||||||
alloydb_admin_config := getOrFatal(t, "alloydb-postgres-admin")
|
alloydb_observability_config, _ := Get("alloydb-postgres-observability")
|
||||||
alloydb_observability_config := getOrFatal(t, "alloydb-postgres-observability")
|
alloydb_config, _ := Get("alloydb-postgres")
|
||||||
alloydb_config := getOrFatal(t, "alloydb-postgres")
|
bigquery_config, _ := Get("bigquery")
|
||||||
bigquery_config := getOrFatal(t, "bigquery")
|
clickhouse_config, _ := Get("clickhouse")
|
||||||
clickhouse_config := getOrFatal(t, "clickhouse")
|
cloudsqlpg_observability_config, _ := Get("cloud-sql-postgres-observability")
|
||||||
cloudsqlpg_observability_config := getOrFatal(t, "cloud-sql-postgres-observability")
|
cloudsqlpg_config, _ := Get("cloud-sql-postgres")
|
||||||
cloudsqlpg_config := getOrFatal(t, "cloud-sql-postgres")
|
cloudsqlpg_admin_config, _ := Get("cloud-sql-postgres-admin")
|
||||||
cloudsqlpg_admin_config := getOrFatal(t, "cloud-sql-postgres-admin")
|
cloudsqlmysql_admin_config, _ := Get("cloud-sql-mysql-admin")
|
||||||
cloudsqlmysql_admin_config := getOrFatal(t, "cloud-sql-mysql-admin")
|
cloudsqlmssql_admin_config, _ := Get("cloud-sql-mssql-admin")
|
||||||
cloudsqlmssql_admin_config := getOrFatal(t, "cloud-sql-mssql-admin")
|
cloudsqlmysql_observability_config, _ := Get("cloud-sql-mysql-observability")
|
||||||
cloudsqlmysql_observability_config := getOrFatal(t, "cloud-sql-mysql-observability")
|
cloudsqlmysql_config, _ := Get("cloud-sql-mysql")
|
||||||
cloudsqlmysql_config := getOrFatal(t, "cloud-sql-mysql")
|
cloudsqlmssql_observability_config, _ := Get("cloud-sql-mssql-observability")
|
||||||
cloudsqlmssql_observability_config := getOrFatal(t, "cloud-sql-mssql-observability")
|
cloudsqlmssql_config, _ := Get("cloud-sql-mssql")
|
||||||
cloudsqlmssql_config := getOrFatal(t, "cloud-sql-mssql")
|
dataplex_config, _ := Get("dataplex")
|
||||||
dataplex_config := getOrFatal(t, "dataplex")
|
firestoreconfig, _ := Get("firestore")
|
||||||
firestoreconfig := getOrFatal(t, "firestore")
|
looker_config, _ := Get("looker")
|
||||||
looker_config := getOrFatal(t, "looker")
|
lookerca_config, _ := Get("looker-conversational-analytics")
|
||||||
lookerca_config := getOrFatal(t, "looker-conversational-analytics")
|
mysql_config, _ := Get("mysql")
|
||||||
mysql_config := getOrFatal(t, "mysql")
|
mssql_config, _ := Get("mssql")
|
||||||
mssql_config := getOrFatal(t, "mssql")
|
oceanbase_config, _ := Get("oceanbase")
|
||||||
oceanbase_config := getOrFatal(t, "oceanbase")
|
postgresconfig, _ := Get("postgres")
|
||||||
postgresconfig := getOrFatal(t, "postgres")
|
singlestore_config, _ := Get("singlestore")
|
||||||
singlestore_config := getOrFatal(t, "singlestore")
|
spanner_config, _ := Get("spanner")
|
||||||
spanner_config := getOrFatal(t, "spanner")
|
spannerpg_config, _ := Get("spanner-postgres")
|
||||||
spannerpg_config := getOrFatal(t, "spanner-postgres")
|
mindsdb_config, _ := Get("mindsdb")
|
||||||
mindsdb_config := getOrFatal(t, "mindsdb")
|
sqlite_config, _ := Get("sqlite")
|
||||||
sqlite_config := getOrFatal(t, "sqlite")
|
neo4jconfig, _ := Get("neo4j")
|
||||||
neo4jconfig := getOrFatal(t, "neo4j")
|
healthcare_config, _ := Get("cloud-healthcare")
|
||||||
healthcare_config := getOrFatal(t, "cloud-healthcare")
|
snowflake_config, _ := Get("snowflake")
|
||||||
snowflake_config := getOrFatal(t, "snowflake")
|
|
||||||
if len(alloydb_omni_config) <= 0 {
|
|
||||||
t.Fatalf("unexpected error: could not fetch alloydb omni prebuilt tools yaml")
|
|
||||||
}
|
|
||||||
if len(alloydb_admin_config) <= 0 {
|
if len(alloydb_admin_config) <= 0 {
|
||||||
t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
|
t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
|
||||||
}
|
}
|
||||||
@@ -238,11 +233,3 @@ func TestFailGetPrebuiltTool(t *testing.T) {
|
|||||||
t.Fatalf("unexpected an error but got nil.")
|
t.Fatalf("unexpected an error but got nil.")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func getOrFatal(t *testing.T, prebuiltSourceConfig string) []byte {
|
|
||||||
bytes, err := Get(prebuiltSourceConfig)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Cannot get prebuilt config for %q, error %v", prebuiltSourceConfig, err)
|
|
||||||
}
|
|
||||||
return bytes
|
|
||||||
}
|
|
||||||
|

@@ -1,277 +0,0 @@
-# Copyright 2026 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-sources:
-  alloydb-omni-source:
-    kind: postgres
-    host: ${ALLOYDB_OMNI_HOST:localhost}
-    port: ${ALLOYDB_OMNI_PORT:5432}
-    database: ${ALLOYDB_OMNI_DATABASE}
-    user: ${ALLOYDB_OMNI_USER}
-    password: ${ALLOYDB_OMNI_PASSWORD:}
-    queryParams: ${ALLOYDB_OMNI_QUERY_PARAMS:}
-
-tools:
-  execute_sql:
-    kind: postgres-execute-sql
-    source: alloydb-omni-source
-    description: Use this tool to execute sql.
-
-  list_tables:
-    kind: postgres-list-tables
-    source: alloydb-omni-source
-    description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
-
-  list_active_queries:
-    kind: postgres-list-active-queries
-    source: alloydb-omni-source
-    description: "List the top N (default 50) currently running queries (state='active') from pg_stat_activity, ordered by longest-running first. Returns pid, user, database, application_name, client_addr, state, wait_event_type/wait_event, backend/xact/query start times, computed query_duration, and the SQL text."
-
-  list_available_extensions:
-    kind: postgres-list-available-extensions
-    source: alloydb-omni-source
-    description: "Discover all PostgreSQL extensions available for installation on this server, returning name, default_version, and description."
-
-  list_installed_extensions:
-    kind: postgres-list-installed-extensions
-    source: alloydb-omni-source
-    description: "List all installed PostgreSQL extensions with their name, version, schema, owner, and description."
-
-  long_running_transactions:
-    kind: postgres-long-running-transactions
-    source: alloydb-omni-source
-
-  list_locks:
-    kind: postgres-list-locks
-    source: alloydb-omni-source
-
-  replication_stats:
-    kind: postgres-replication-stats
-    source: alloydb-omni-source
-
-  list_autovacuum_configurations:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "List PostgreSQL autovacuum-related configurations (name and current setting) from pg_settings."
-    statement: |
-      SELECT name,
-             setting
-      FROM pg_settings
-      WHERE category = 'Autovacuum';
-
-  list_columnar_configurations:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "List AlloyDB Omni columnar-related configurations (name and current setting) from pg_settings."
-    statement: |
-      SELECT name,
-             setting
-      FROM pg_settings
-      WHERE name like 'google_columnar_engine.%';
-
-  list_columnar_recommended_columns:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "Lists columns that AlloyDB Omni recommends adding to the columnar engine to improve query performance."
-    statement: select * from g_columnar_recommended_columns;
-
-  list_memory_configurations:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "List PostgreSQL memory-related configurations (name and current setting) from pg_settings."
-    statement: |
-      (
-        SELECT
-          name,
-          pg_size_pretty((setting::bigint * 1024)::bigint) setting
-        FROM pg_settings
-        WHERE name IN ('work_mem', 'maintenance_work_mem')
-      )
-      UNION ALL
-      (
-        SELECT
-          name,
-          pg_size_pretty((((setting::bigint) * 8) * 1024)::bigint)
-        FROM pg_settings
-        WHERE name IN ('shared_buffers', 'wal_buffers', 'effective_cache_size', 'temp_buffers')
-      )
-      ORDER BY 1 DESC;
-
-  list_top_bloated_tables:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: |
-      List the top tables by dead-tuple (approximate bloat signal), returning schema, table, live/dead tuples, percentage, and last vacuum/analyze times.
-    statement: |
-      SELECT
-        schemaname AS schema_name,
-        relname AS relation_name,
-        n_live_tup AS live_tuples,
-        n_dead_tup AS dead_tuples,
-        TRUNC((n_dead_tup::NUMERIC / NULLIF(n_live_tup + n_dead_tup, 0)) * 100, 2) AS dead_tuple_percentage,
-        last_vacuum,
-        last_autovacuum,
-        last_analyze,
-        last_autoanalyze
-      FROM pg_stat_user_tables
-      ORDER BY n_dead_tup DESC
-      LIMIT COALESCE($1::int, 50);
-    parameters:
-      - name: limit
-        description: "The maximum number of results to return."
-        type: integer
-        default: 50
-
-  list_replication_slots:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "List key details for all PostgreSQL replication slots (e.g., type, database, active status) and calculates the size of the outstanding WAL that is being prevented from removal by the slot."
-    statement: |
-      SELECT
-        slot_name,
-        slot_type,
-        plugin,
-        database,
-        temporary,
-        active,
-        restart_lsn,
-        confirmed_flush_lsn,
-        xmin,
-        catalog_xmin,
-        pg_size_pretty(pg_wal_lsn_diff(pg_current_wal_lsn(), restart_lsn)) AS retained_wal
-      FROM pg_replication_slots;
-
-  list_invalid_indexes:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "Lists all invalid PostgreSQL indexes which are taking up disk space but are unusable by the query planner. Typically created by failed CREATE INDEX CONCURRENTLY operations."
-    statement: |
-      SELECT
-        nspname AS schema_name,
-        indexrelid::regclass AS index_name,
-        indrelid::regclass AS table_name,
-        pg_size_pretty(pg_total_relation_size(indexrelid)) AS index_size,
-        indisready,
-        indisvalid,
-        pg_get_indexdef(pg_class.oid) AS index_def
-      FROM pg_index
-      JOIN pg_class ON pg_class.oid = pg_index.indexrelid
-      JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
-      WHERE indisvalid = FALSE;
-
-  get_query_plan:
-    kind: postgres-sql
-    source: alloydb-omni-source
-    description: "Generate a PostgreSQL EXPLAIN plan in JSON format for a single SQL statement—without executing it. This returns the optimizer's estimated plan, costs, and rows (no ANALYZE, no extra options). Use in production safely for plan inspection, regression checks, and query tuning workflows."
-    statement: |
-      EXPLAIN (FORMAT JSON) {{.query}};
-    templateParameters:
-      - name: query
-        type: string
-        description: "The SQL statement for which you want to generate plan (omit the EXPLAIN keyword)."
-        required: true
-
-  list_views:
-    kind: postgres-list-views
-    source: alloydb-omni-source
-
-  list_schemas:
-    kind: postgres-list-schemas
-    source: alloydb-omni-source
-
-  list_indexes:
-    kind: postgres-list-indexes
-    source: alloydb-omni-source
-
-  list_sequences:
-    kind: postgres-list-sequences
-    source: alloydb-omni-source
-
-  database_overview:
-    kind: postgres-database-overview
-    source: alloydb-omni-source
-
-  list_triggers:
-    kind: postgres-list-triggers
-    source: alloydb-omni-source
-
-  list_query_stats:
-    kind: postgres-list-query-stats
-    source: alloydb-omni-source
-
-  get_column_cardinality:
-    kind: postgres-get-column-cardinality
-    source: alloydb-omni-source
-
-  list_table_stats:
-    kind: postgres-list-table-stats
-    source: alloydb-omni-source
-
-  list_publication_tables:
-    kind: postgres-list-publication-tables
-    source: alloydb-omni-source
-
-  list_tablespaces:
-    kind: postgres-list-tablespaces
-    source: alloydb-omni-source
-
-  list_pg_settings:
-    kind: postgres-list-pg-settings
-    source: alloydb-omni-source
-
-  list_database_stats:
-    kind: postgres-list-database-stats
-    source: alloydb-omni-source
-
-  list_roles:
-    kind: postgres-list-roles
-    source: alloydb-omni-source
-
-  list_stored_procedure:
-    kind: postgres-list-stored-procedure
-    source: alloydb-omni-source
-
-toolsets:
-  alloydb_omni_database_tools:
-    - execute_sql
-    - list_tables
-    - list_active_queries
-    - list_available_extensions
-    - list_installed_extensions
-    - list_autovacuum_configurations
-    - list_columnar_configurations
-    - list_columnar_recommended_columns
-    - list_memory_configurations
-    - list_top_bloated_tables
-    - list_replication_slots
-    - list_invalid_indexes
-    - get_query_plan
-    - list_views
-    - list_schemas
-    - database_overview
-    - list_triggers
-    - list_indexes
-    - list_sequences
-    - long_running_transactions
-    - list_locks
-    - replication_stats
-    - list_query_stats
-    - get_column_cardinality
-    - list_publication_tables
-    - list_tablespaces
-    - list_pg_settings
-    - list_database_stats
-    - list_roles
-    - list_table_stats
-    - list_stored_procedure
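
The deleted prebuilt configuration above relies on ${NAME:default}-style references such as ${ALLOYDB_OMNI_PORT:5432}, which read an environment variable and fall back to the default written after the colon. Below is a minimal Go sketch of that substitution rule; the helper name, regular expression, and exact fallback semantics are assumptions for illustration, not the Toolbox implementation.

package main

import (
    "fmt"
    "os"
    "regexp"
)

// placeholderRE matches "${NAME}" or "${NAME:default}" (hypothetical helper, illustration only).
var placeholderRE = regexp.MustCompile(`^\$\{([A-Z0-9_]+)(?::(.*))?\}$`)

// expandPlaceholder resolves one placeholder against the process environment,
// returning the default (possibly empty) when the variable is unset.
func expandPlaceholder(raw string) string {
    m := placeholderRE.FindStringSubmatch(raw)
    if m == nil {
        return raw // not a placeholder; keep the literal value
    }
    if v, ok := os.LookupEnv(m[1]); ok {
        return v
    }
    return m[2] // default after the colon, "" if none was given
}

func main() {
    fmt.Println(expandPlaceholder("${ALLOYDB_OMNI_PORT:5432}")) // "5432" unless the env var is set
    fmt.Println(expandPlaceholder("${ALLOYDB_OMNI_DATABASE}"))  // "" unless the env var is set
}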

@@ -46,9 +46,6 @@ tools:
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
-  restore_backup:
-    kind: cloud-sql-restore-backup
-    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mssql_admin_tools:
@@ -61,4 +58,3 @@ toolsets:
    - wait_for_operation
    - clone_instance
    - create_backup
-    - restore_backup

@@ -46,9 +46,6 @@ tools:
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
-  restore_backup:
-    kind: cloud-sql-restore-backup
-    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mysql_admin_tools:
@@ -61,4 +58,3 @@ toolsets:
    - wait_for_operation
    - clone_instance
    - create_backup
-    - restore_backup

@@ -49,9 +49,6 @@ tools:
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
-  restore_backup:
-    kind: cloud-sql-restore-backup
-    source: cloud-sql-admin-source

toolsets:
  cloud_sql_postgres_admin_tools:
@@ -65,4 +62,3 @@ toolsets:
    - postgres_upgrade_precheck
    - clone_instance
    - create_backup
-    - restore_backup

@@ -26,7 +26,6 @@ import (
    "github.com/go-chi/render"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util"
-   "github.com/googleapis/genai-toolbox/internal/util/parameters"
    "go.opentelemetry.io/otel/attribute"
    "go.opentelemetry.io/otel/codes"
    "go.opentelemetry.io/otel/metric"
@@ -232,7 +231,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
        return
    }

-   params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
+   params, err := tool.ParseParams(data, claimsFromAuth)
    if err != nil {
        // If auth error, return 401
        if errors.Is(err, util.ErrUnauthorized) {
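
This file and the MCP handlers later in the diff swap parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth) for tool.ParseParams(data, claimsFromAuth), so call sites stop importing the parameters package and let each tool validate its own inputs. A rough sketch of that call-site shape follows; the Tool interface and echoTool type here are hypothetical stand-ins, not the real tools package.

package main

import "fmt"

// Tool is a hypothetical, trimmed-down interface: parsing lives behind the tool,
// so handlers only pass raw data plus auth claims and never touch parameter types.
type Tool interface {
    ParseParams(data map[string]any, claims map[string]map[string]any) (map[string]any, error)
}

type echoTool struct{}

func (echoTool) ParseParams(data map[string]any, _ map[string]map[string]any) (map[string]any, error) {
    if _, ok := data["message"]; !ok {
        return nil, fmt.Errorf("missing required parameter %q", "message")
    }
    return data, nil
}

func main() {
    var tool Tool = echoTool{}
    params, err := tool.ParseParams(map[string]any{"message": "hi"}, nil)
    fmt.Println(params, err)
}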

@@ -87,10 +87,6 @@ func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error
    return t.requiresClientAuthrorization, nil
}

-func (t MockTool) GetParameters() parameters.Parameters {
-   return t.Params
-}
-
func (t MockTool) McpManifest() tools.McpManifest {
    properties := make(map[string]parameters.ParameterMcpManifest)
    required := make([]string, 0)

@@ -18,7 +18,6 @@ import (
    "context"
    "fmt"
    "io"
-   "regexp"
    "strings"

    yaml "github.com/goccy/go-yaml"
@@ -67,14 +66,12 @@ type ServerConfig struct {
    Stdio bool
    // DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
    DisableReload bool
-   // UI indicates if Toolbox UI endpoints (/ui) are available.
+   // UI indicates if Toolbox UI endpoints (/ui) are available
    UI bool
    // Specifies a list of origins permitted to access this server.
    AllowedOrigins []string
-   // Specifies a list of hosts permitted to access this server.
+   // Specifies a list of hosts permitted to access this server
    AllowedHosts []string
-   // UserAgentMetadata specifies additional metadata to append to the User-Agent string.
-   UserAgentMetadata []string
}

type logFormat string
@@ -139,12 +136,12 @@ type PromptsetConfigs map[string]prompts.PromptsetConfig

func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, AuthServiceConfigs, EmbeddingModelConfigs, ToolConfigs, ToolsetConfigs, PromptConfigs, error) {
    // prepare configs map
-   var sourceConfigs SourceConfigs
-   var authServiceConfigs AuthServiceConfigs
-   var embeddingModelConfigs EmbeddingModelConfigs
-   var toolConfigs ToolConfigs
-   var toolsetConfigs ToolsetConfigs
-   var promptConfigs PromptConfigs
+   sourceConfigs := make(map[string]sources.SourceConfig)
+   authServiceConfigs := make(AuthServiceConfigs)
+   embeddingModelConfigs := make(EmbeddingModelConfigs)
+   toolConfigs := make(ToolConfigs)
+   toolsetConfigs := make(ToolsetConfigs)
+   promptConfigs := make(PromptConfigs)
    // promptset configs is not yet supported

    decoder := yaml.NewDecoder(bytes.NewReader(raw))
@@ -160,7 +157,7 @@ func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, Au
        var kind, name string
        var ok bool
        if kind, ok = resource["kind"].(string); !ok {
-           return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'kind' field or it is not a string: %v", resource)
+           return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'kind' field or it is not a string")
        }
        if name, ok = resource["name"].(string); !ok {
            return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'name' field or it is not a string")
@@ -174,54 +171,36 @@ func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, Au
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if sourceConfigs == nil {
-               sourceConfigs = make(SourceConfigs)
-           }
            sourceConfigs[name] = c
        case "authServices":
            c, err := UnmarshalYAMLAuthServiceConfig(ctx, name, resource)
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if authServiceConfigs == nil {
-               authServiceConfigs = make(AuthServiceConfigs)
-           }
            authServiceConfigs[name] = c
        case "tools":
            c, err := UnmarshalYAMLToolConfig(ctx, name, resource)
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if toolConfigs == nil {
-               toolConfigs = make(ToolConfigs)
-           }
            toolConfigs[name] = c
        case "toolsets":
            c, err := UnmarshalYAMLToolsetConfig(ctx, name, resource)
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if toolsetConfigs == nil {
-               toolsetConfigs = make(ToolsetConfigs)
-           }
            toolsetConfigs[name] = c
        case "embeddingModels":
            c, err := UnmarshalYAMLEmbeddingModelConfig(ctx, name, resource)
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if embeddingModelConfigs == nil {
-               embeddingModelConfigs = make(EmbeddingModelConfigs)
-           }
            embeddingModelConfigs[name] = c
        case "prompts":
            c, err := UnmarshalYAMLPromptConfig(ctx, name, resource)
            if err != nil {
                return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
            }
-           if promptConfigs == nil {
-               promptConfigs = make(PromptConfigs)
-           }
            promptConfigs[name] = c
        default:
            return nil, nil, nil, nil, nil, nil, fmt.Errorf("invalid kind %s", kind)
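
The change above pre-initializes every config map with make instead of declaring nil maps, which is why the same change can drop the per-case nil checks before each assignment: reading from a nil map is safe, but writing to one panics. A tiny standalone illustration, not Toolbox code:

package main

import "fmt"

func main() {
    var uninitialized map[string]int // nil map: reads are fine, writes panic
    fmt.Println(uninitialized["a"])  // 0, reading a nil map is allowed

    initialized := make(map[string]int) // pre-initialized, as in the new code
    initialized["a"] = 1                // safe: no nil check needed before assignment
    fmt.Println(initialized["a"])

    defer func() { fmt.Println("recovered:", recover()) }()
    uninitialized["a"] = 1 // panics: assignment to entry in nil map
}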

@@ -297,45 +276,6 @@ func UnmarshalYAMLToolConfig(ctx context.Context, name string, r map[string]any)
    if r["authRequired"] == nil {
        r["authRequired"] = []string{}
    }
-
-   // validify parameter references
-   if rawParams, ok := r["parameters"]; ok {
-       if paramsList, ok := rawParams.([]any); ok {
-           // Turn params into a map
-           validParamNames := make(map[string]bool)
-           for _, rawP := range paramsList {
-               if pMap, ok := rawP.(map[string]any); ok {
-                   if pName, ok := pMap["name"].(string); ok && pName != "" {
-                       validParamNames[pName] = true
-                   }
-               }
-           }
-
-           // Validate references
-           for i, rawP := range paramsList {
-               pMap, ok := rawP.(map[string]any)
-               if !ok {
-                   continue
-               }
-
-               pName, _ := pMap["name"].(string)
-               refName, _ := pMap["valueFromParam"].(string)
-
-               if refName != "" {
-                   // Check if the referenced parameter exists
-                   if !validParamNames[refName] {
-                       return nil, fmt.Errorf("tool %q config error: parameter %q (index %d) references '%q' in the 'valueFromParam' field, which is not a defined parameter", name, pName, i, refName)
-                   }
-
-                   // Check for self-reference
-                   if refName == pName {
-                       return nil, fmt.Errorf("tool %q config error: parameter %q cannot copy value from itself", name, pName)
-                   }
-               }
-           }
-       }
-   }
-
    dec, err := util.NewStrictDecoder(r)
    if err != nil {
        return nil, fmt.Errorf("error creating decoder: %s", err)
@@ -349,11 +289,7 @@ func UnmarshalYAMLToolConfig(ctx context.Context, name string, r map[string]any)

func UnmarshalYAMLToolsetConfig(ctx context.Context, name string, r map[string]any) (tools.ToolsetConfig, error) {
    var toolsetConfig tools.ToolsetConfig
-   toolList, ok := r["tools"].([]any)
-   if !ok {
-       return toolsetConfig, fmt.Errorf("tools is missing or not a list of strings: %v", r)
-   }
-   justTools := map[string]any{"tools": toolList}
+   justTools := map[string]any{"tools": r["tools"]}
    dec, err := util.NewStrictDecoder(justTools)
    if err != nil {
        return toolsetConfig, fmt.Errorf("error creating decoder: %s", err)
@@ -388,23 +324,3 @@ func UnmarshalYAMLPromptConfig(ctx context.Context, name string, r map[string]an
    }
    return promptCfg, nil
}
-
-// Tools naming validation is added in the MCP v2025-11-25, but we'll be
-// implementing it across Toolbox
-// Tool names SHOULD be between 1 and 128 characters in length (inclusive).
-// Tool names SHOULD be considered case-sensitive.
-// The following SHOULD be the only allowed characters: uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.)
-// Tool names SHOULD NOT contain spaces, commas, or other special characters.
-// Tool names SHOULD be unique within a server.
-func NameValidation(name string) error {
-   strLen := len(name)
-   if strLen < 1 || strLen > 128 {
-       return fmt.Errorf("resource name SHOULD be between 1 and 128 characters in length (inclusive)")
-   }
-   validChars := regexp.MustCompile("^[a-zA-Z0-9_.-]+$")
-   isValid := validChars.MatchString(name)
-   if !isValid {
-       return fmt.Errorf("invalid character for resource name; only uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.) is allowed")
-   }
-   return nil
-}
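
The removed NameValidation helper enforced the naming rule spelled out in its comments: 1 to 128 characters drawn from ASCII letters, digits, underscore, hyphen, and dot. A small standalone check reusing the same regular expression; the function name below is made up for illustration:

package main

import (
    "fmt"
    "regexp"
)

// validChars is the same character class the removed helper used.
var validChars = regexp.MustCompile("^[a-zA-Z0-9_.-]+$")

// isValidToolName applies the length and character rules from the removed code.
func isValidToolName(name string) bool {
    return len(name) >= 1 && len(name) <= 128 && validChars.MatchString(name)
}

func main() {
    fmt.Println(isValidToolName("list_tables"))    // true
    fmt.Println(isValidToolName("get query plan")) // false: spaces are not allowed
}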

@@ -27,21 +27,19 @@ import (
    v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
    v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
    v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
-   v20251125 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20251125"
    "github.com/googleapis/genai-toolbox/internal/server/resources"
    "github.com/googleapis/genai-toolbox/internal/tools"
)

// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
// Update the version used in InitializeResponse when this value is updated.
-const LATEST_PROTOCOL_VERSION = v20251125.PROTOCOL_VERSION
+const LATEST_PROTOCOL_VERSION = v20250618.PROTOCOL_VERSION

// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
var SUPPORTED_PROTOCOL_VERSIONS = []string{
    v20241105.PROTOCOL_VERSION,
    v20250326.PROTOCOL_VERSION,
    v20250618.PROTOCOL_VERSION,
-   v20251125.PROTOCOL_VERSION,
}

// InitializeResponse runs capability negotiation and protocol version agreement.
@@ -104,8 +102,6 @@ func NotificationHandler(ctx context.Context, body []byte) error {
// This is the Operation phase of the lifecycle for MCP client-server connections.
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
    switch mcpVersion {
-   case v20251125.PROTOCOL_VERSION:
-       return v20251125.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
    case v20250618.PROTOCOL_VERSION:
        return v20250618.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
    case v20250326.PROTOCOL_VERSION:
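
With the v20251125 package removed, the latest advertised protocol version falls back to v20250618 and the supported list shrinks to three entries. The sketch below shows the negotiation rule such a list typically supports: accept the requested version when it appears in the list, otherwise answer with the latest. The version strings are derived from the package names above and are placeholders here, not an assertion about the real constants.

package main

import "fmt"

const latestProtocolVersion = "2025-06-18"

var supportedProtocolVersions = []string{"2024-11-05", "2025-03-26", "2025-06-18"}

// negotiateVersion returns the requested version if the server supports it,
// otherwise it falls back to the newest version the server knows about.
func negotiateVersion(requested string) string {
    for _, v := range supportedProtocolVersions {
        if v == requested {
            return v
        }
    }
    return latestProtocolVersion
}

func main() {
    fmt.Println(negotiateVersion("2025-03-26")) // "2025-03-26"
    fmt.Println(negotiateVersion("2099-01-01")) // "2025-06-18"
}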

@@ -28,7 +28,6 @@ import (
    "github.com/googleapis/genai-toolbox/internal/server/resources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util"
-   "github.com/googleapis/genai-toolbox/internal/util/parameters"
)

// ProcessMethod returns a response for the request.
@@ -177,20 +176,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
    }
    logger.DebugContext(ctx, "tool invocation authorized")

-   params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
+   params, err := tool.ParseParams(data, claimsFromAuth)
    if err != nil {
        err = fmt.Errorf("provided parameters were invalid: %w", err)
        return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
    }
    logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))

-   embeddingModels := resourceMgr.GetEmbeddingModelMap()
-   params, err = tool.EmbedParams(ctx, params, embeddingModels)
-   if err != nil {
-       err = fmt.Errorf("error embedding parameters: %w", err)
-       return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
-   }
-
    // run tool invocation and generate response.
    results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
    if err != nil {

@@ -28,7 +28,6 @@ import (
    "github.com/googleapis/genai-toolbox/internal/server/resources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util"
-   "github.com/googleapis/genai-toolbox/internal/util/parameters"
)

// ProcessMethod returns a response for the request.
@@ -177,20 +176,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
    }
    logger.DebugContext(ctx, "tool invocation authorized")

-   params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
+   params, err := tool.ParseParams(data, claimsFromAuth)
    if err != nil {
        err = fmt.Errorf("provided parameters were invalid: %w", err)
        return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
    }
    logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))

-   embeddingModels := resourceMgr.GetEmbeddingModelMap()
-   params, err = tool.EmbedParams(ctx, params, embeddingModels)
-   if err != nil {
-       err = fmt.Errorf("error embedding parameters: %w", err)
-       return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
-   }
-
    // run tool invocation and generate response.
    results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
    if err != nil {

Some files were not shown because too many files have changed in this diff.