mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-01-29 09:18:12 -05:00
Compare commits
45 Commits
config-yam
...
processing
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7d9946026e | ||
|
|
d1358916d8 | ||
|
|
2d5d33388c | ||
|
|
252fc3091a | ||
|
|
10c445b05c | ||
|
|
341316bb63 | ||
|
|
44da09300c | ||
|
|
0dfcf24859 | ||
|
|
be0b7fc96e | ||
|
|
d7016d2251 | ||
|
|
d44283ffcf | ||
|
|
69e3f2eb24 | ||
|
|
c724bea786 | ||
|
|
4bc684d3ed | ||
|
|
9434450a65 | ||
|
|
362ed8df41 | ||
|
|
293c1d6889 | ||
|
|
cf477b529a | ||
|
|
cdc4d0d304 | ||
|
|
3aa1b79c13 | ||
|
|
941ed689b4 | ||
|
|
f298c8f444 | ||
|
|
f6474739e3 | ||
|
|
1d7c498116 | ||
|
|
9294ce39c8 | ||
|
|
86bf7bf8d0 | ||
|
|
ad2893d809 | ||
|
|
e535b372ea | ||
|
|
5054212fa4 | ||
|
|
93ca4578da | ||
|
|
ec936aed03 | ||
|
|
fe69272c84 | ||
|
|
15101b1edb | ||
|
|
e4f60e5633 | ||
|
|
d7af21bdde | ||
|
|
adc9589766 | ||
|
|
c25a2330fe | ||
|
|
6e09b08c6a | ||
|
|
1f15a111f1 | ||
|
|
dfddeb528d | ||
|
|
00c3e6d8cb | ||
|
|
d00b6fdf18 | ||
|
|
4d23a3bbf2 | ||
|
|
5e0999ebf5 | ||
|
|
6b02591703 |
@@ -87,7 +87,7 @@ steps:
|
|||||||
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
|
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv:
|
secretEnv:
|
||||||
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
|
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -134,7 +134,7 @@ steps:
|
|||||||
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
|
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
|
||||||
- "ALLOYDB_POSTGRES_REGION=$_REGION"
|
- "ALLOYDB_POSTGRES_REGION=$_REGION"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
|
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -293,7 +293,26 @@ steps:
|
|||||||
.ci/test_with_coverage.sh \
|
.ci/test_with_coverage.sh \
|
||||||
"Cloud Healthcare API" \
|
"Cloud Healthcare API" \
|
||||||
cloudhealthcare \
|
cloudhealthcare \
|
||||||
cloudhealthcare || echo "Integration tests failed."
|
cloudhealthcare
|
||||||
|
|
||||||
|
- id: "cloud-logging-admin"
|
||||||
|
name: golang:1
|
||||||
|
waitFor: ["compile-test-binary"]
|
||||||
|
entrypoint: /bin/bash
|
||||||
|
env:
|
||||||
|
- "GOPATH=/gopath"
|
||||||
|
- "LOGADMIN_PROJECT=$PROJECT_ID"
|
||||||
|
secretEnv: ["CLIENT_ID"]
|
||||||
|
volumes:
|
||||||
|
- name: "go"
|
||||||
|
path: "/gopath"
|
||||||
|
args:
|
||||||
|
- -c
|
||||||
|
- |
|
||||||
|
.ci/test_with_coverage.sh \
|
||||||
|
"Cloud Logging Admin" \
|
||||||
|
cloudloggingadmin \
|
||||||
|
cloudloggingadmin
|
||||||
|
|
||||||
- id: "postgres"
|
- id: "postgres"
|
||||||
name: golang:1
|
name: golang:1
|
||||||
@@ -305,7 +324,7 @@ steps:
|
|||||||
- "POSTGRES_HOST=$_POSTGRES_HOST"
|
- "POSTGRES_HOST=$_POSTGRES_HOST"
|
||||||
- "POSTGRES_PORT=$_POSTGRES_PORT"
|
- "POSTGRES_PORT=$_POSTGRES_PORT"
|
||||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||||
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
|
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
|
||||||
volumes:
|
volumes:
|
||||||
- name: "go"
|
- name: "go"
|
||||||
path: "/gopath"
|
path: "/gopath"
|
||||||
@@ -964,6 +983,13 @@ steps:
|
|||||||
|
|
||||||
availableSecrets:
|
availableSecrets:
|
||||||
secretManager:
|
secretManager:
|
||||||
|
# Common secrets
|
||||||
|
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
|
||||||
|
env: CLIENT_ID
|
||||||
|
- versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
|
||||||
|
env: API_KEY
|
||||||
|
|
||||||
|
# Resource-specific secrets
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
|
||||||
env: CLOUD_SQL_POSTGRES_USER
|
env: CLOUD_SQL_POSTGRES_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
|
||||||
@@ -980,8 +1006,6 @@ availableSecrets:
|
|||||||
env: POSTGRES_USER
|
env: POSTGRES_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
|
||||||
env: POSTGRES_PASS
|
env: POSTGRES_PASS
|
||||||
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
|
|
||||||
env: CLIENT_ID
|
|
||||||
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
|
||||||
env: NEO4J_USER
|
env: NEO4J_USER
|
||||||
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
|
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
|
||||||
|
|||||||
4
.github/workflows/deploy_dev_docs.yaml
vendored
4
.github/workflows/deploy_dev_docs.yaml
vendored
@@ -51,12 +51,12 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
hugo-version: "0.145.0"
|
hugo-version: "0.145.0"
|
||||||
extended: true
|
extended: true
|
||||||
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/deploy_versioned_docs.yaml
vendored
2
.github/workflows/deploy_versioned_docs.yaml
vendored
@@ -44,7 +44,7 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
|
|||||||
4
.github/workflows/docs_preview_deploy.yaml
vendored
4
.github/workflows/docs_preview_deploy.yaml
vendored
@@ -62,12 +62,12 @@ jobs:
|
|||||||
extended: true
|
extended: true
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
|
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||||
with:
|
with:
|
||||||
path: ~/.npm
|
path: ~/.npm
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||||
|
|||||||
3
.github/workflows/link_checker_workflow.yaml
vendored
3
.github/workflows/link_checker_workflow.yaml
vendored
@@ -25,7 +25,7 @@ jobs:
|
|||||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||||
|
|
||||||
- name: Restore lychee cache
|
- name: Restore lychee cache
|
||||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
|
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5
|
||||||
with:
|
with:
|
||||||
path: .lycheecache
|
path: .lycheecache
|
||||||
key: cache-lychee-${{ github.sha }}
|
key: cache-lychee-${{ github.sha }}
|
||||||
@@ -39,6 +39,7 @@ jobs:
|
|||||||
--no-progress
|
--no-progress
|
||||||
--cache
|
--cache
|
||||||
--max-cache-age 1d
|
--max-cache-age 1d
|
||||||
|
--exclude '^neo4j\+.*' --exclude '^bolt://.*'
|
||||||
README.md
|
README.md
|
||||||
docs/
|
docs/
|
||||||
output: /tmp/foo.txt
|
output: /tmp/foo.txt
|
||||||
|
|||||||
4
.github/workflows/lint.yaml
vendored
4
.github/workflows/lint.yaml
vendored
@@ -51,11 +51,11 @@ jobs:
|
|||||||
console.log('Failed to remove label. Another job may have already removed it!');
|
console.log('Failed to remove label. Another job may have already removed it!');
|
||||||
}
|
}
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
||||||
with:
|
with:
|
||||||
go-version: "1.25"
|
go-version: "1.25"
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
|||||||
2
.github/workflows/sync-labels.yaml
vendored
2
.github/workflows/sync-labels.yaml
vendored
@@ -29,7 +29,7 @@ jobs:
|
|||||||
issues: 'write'
|
issues: 'write'
|
||||||
pull-requests: 'write'
|
pull-requests: 'write'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||||
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
|
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
4
.github/workflows/tests.yaml
vendored
4
.github/workflows/tests.yaml
vendored
@@ -57,12 +57,12 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
||||||
with:
|
with:
|
||||||
go-version: "1.24"
|
go-version: "1.24"
|
||||||
|
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
|||||||
@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
|
|||||||
# Add a new version block here before every release
|
# Add a new version block here before every release
|
||||||
# The order of versions in this file is mirrored into the dropdown
|
# The order of versions in this file is mirrored into the dropdown
|
||||||
|
|
||||||
|
[[params.versions]]
|
||||||
|
version = "v0.26.0"
|
||||||
|
url = "https://googleapis.github.io/genai-toolbox/v0.26.0/"
|
||||||
|
|
||||||
[[params.versions]]
|
[[params.versions]]
|
||||||
version = "v0.25.0"
|
version = "v0.25.0"
|
||||||
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
|
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ https://dev.mysql.com/doc/refman/8.4/en/user-names.html
|
|||||||
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
|
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
|
||||||
https://www.npmjs.com/package/@toolbox-sdk/core
|
https://www.npmjs.com/package/@toolbox-sdk/core
|
||||||
https://www.npmjs.com/package/@toolbox-sdk/adk
|
https://www.npmjs.com/package/@toolbox-sdk/adk
|
||||||
|
https://www.oceanbase.com/
|
||||||
|
|
||||||
# Ignore social media and blog profiles to reduce external request overhead
|
# Ignore social media and blog profiles to reduce external request overhead
|
||||||
https://medium.com/@mcp_toolbox
|
https://medium.com/@mcp_toolbox
|
||||||
|
|||||||
25
CHANGELOG.md
25
CHANGELOG.md
@@ -1,5 +1,30 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## [0.26.0](https://github.com/googleapis/genai-toolbox/compare/v0.25.0...v0.26.0) (2026-01-22)
|
||||||
|
|
||||||
|
|
||||||
|
### ⚠ BREAKING CHANGES
|
||||||
|
|
||||||
|
* Validate tool naming ([#2305](https://github.com/googleapis/genai-toolbox/issues/2305)) ([5054212](https://github.com/googleapis/genai-toolbox/commit/5054212fa43017207fe83275d27b9fbab96e8ab5))
|
||||||
|
* **tools/cloudgda:** Update description and parameter name for cloudgda tool ([#2288](https://github.com/googleapis/genai-toolbox/issues/2288)) ([6b02591](https://github.com/googleapis/genai-toolbox/commit/6b025917032394a66840488259db8ff2c3063016))
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
* Add new `user-agent-metadata` flag ([#2302](https://github.com/googleapis/genai-toolbox/issues/2302)) ([adc9589](https://github.com/googleapis/genai-toolbox/commit/adc9589766904d9e3cbe0a6399222f8d4bb9d0cc))
|
||||||
|
* Add remaining flag to Toolbox server in MCP registry ([#2272](https://github.com/googleapis/genai-toolbox/issues/2272)) ([5e0999e](https://github.com/googleapis/genai-toolbox/commit/5e0999ebf5cdd9046e96857738254b2e0561b6d2))
|
||||||
|
* **embeddingModel:** Add embedding model to MCP handler ([#2310](https://github.com/googleapis/genai-toolbox/issues/2310)) ([e4f60e5](https://github.com/googleapis/genai-toolbox/commit/e4f60e56335b755ef55b9553d3f40b31858ec8d9))
|
||||||
|
* **sources/bigquery:** Make maximum rows returned from queries configurable ([#2262](https://github.com/googleapis/genai-toolbox/issues/2262)) ([4abf0c3](https://github.com/googleapis/genai-toolbox/commit/4abf0c39e717d53b22cc61efb65e09928c598236))
|
||||||
|
* **prebuilt/cloud-sql:** Add create backup tool for Cloud SQL ([#2141](https://github.com/googleapis/genai-toolbox/issues/2141)) ([8e0fb03](https://github.com/googleapis/genai-toolbox/commit/8e0fb0348315a80f63cb47b3c7204869482448f4))
|
||||||
|
* **prebuilt/cloud-sql:** Add restore backup tool for Cloud SQL ([#2171](https://github.com/googleapis/genai-toolbox/issues/2171)) ([00c3e6d](https://github.com/googleapis/genai-toolbox/commit/00c3e6d8cba54e2ab6cb271c7e6b378895df53e1))
|
||||||
|
* Support combining multiple prebuilt configurations ([#2295](https://github.com/googleapis/genai-toolbox/issues/2295)) ([e535b37](https://github.com/googleapis/genai-toolbox/commit/e535b372ea81864d644a67135a1b07e4e519b4b4))
|
||||||
|
* Support MCP specs version 2025-11-25 ([#2303](https://github.com/googleapis/genai-toolbox/issues/2303)) ([4d23a3b](https://github.com/googleapis/genai-toolbox/commit/4d23a3bbf2797b1f7fe328aeb5789e778121da23))
|
||||||
|
* **tools:** Add `valueFromParam` support to Tool config ([#2333](https://github.com/googleapis/genai-toolbox/issues/2333)) ([15101b1](https://github.com/googleapis/genai-toolbox/commit/15101b1edbe2b85a4a5f9f819c23cf83138f4ee1))
|
||||||
|
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
* **tools/cloudhealthcare:** Add check for client authorization before retrieving token string ([#2327](https://github.com/googleapis/genai-toolbox/issues/2327)) ([c25a233](https://github.com/googleapis/genai-toolbox/commit/c25a2330fea2ac382a398842c9e572e4e19bcb08))
|
||||||
|
|
||||||
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
|
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ Tool type serves as a category or type that a user can assign to a tool.
|
|||||||
|
|
||||||
The following guidelines apply to tool types:
|
The following guidelines apply to tool types:
|
||||||
|
|
||||||
* Should user hyphens over underscores (e.g. `firestore-list-collections` or
|
* Should use hyphens over underscores (e.g. `firestore-list-collections` or
|
||||||
`firestore_list_colelctions`).
|
`firestore_list_colelctions`).
|
||||||
* Should use product name in name (e.g. `firestore-list-collections` over
|
* Should use product name in name (e.g. `firestore-list-collections` over
|
||||||
`list-collections`).
|
`list-collections`).
|
||||||
|
|||||||
18
README.md
18
README.md
@@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
# MCP Toolbox for Databases
|
# MCP Toolbox for Databases
|
||||||
|
|
||||||
|
<a href="https://trendshift.io/repositories/13019" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13019" alt="googleapis%2Fgenai-toolbox | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||||
|
|
||||||
[](https://googleapis.github.io/genai-toolbox/)
|
[](https://googleapis.github.io/genai-toolbox/)
|
||||||
[](https://discord.gg/Dmm69peqjh)
|
[](https://discord.gg/Dmm69peqjh)
|
||||||
[](https://medium.com/@mcp_toolbox)
|
[](https://medium.com/@mcp_toolbox)
|
||||||
@@ -105,7 +107,7 @@ redeploying your application.
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### (Non-production) Running Toolbox
|
### Quickstart: Running Toolbox using NPX
|
||||||
|
|
||||||
You can run Toolbox directly with a [configuration file](#configuration):
|
You can run Toolbox directly with a [configuration file](#configuration):
|
||||||
|
|
||||||
@@ -140,7 +142,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.25.0
|
> export VERSION=0.26.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -153,7 +155,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.25.0
|
> export VERSION=0.26.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -166,7 +168,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```sh
|
> ```sh
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> export VERSION=0.25.0
|
> export VERSION=0.26.0
|
||||||
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||||
> chmod +x toolbox
|
> chmod +x toolbox
|
||||||
> ```
|
> ```
|
||||||
@@ -179,7 +181,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```cmd
|
> ```cmd
|
||||||
> :: see releases page for other versions
|
> :: see releases page for other versions
|
||||||
> set VERSION=0.25.0
|
> set VERSION=0.26.0
|
||||||
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||||
> ```
|
> ```
|
||||||
>
|
>
|
||||||
@@ -191,7 +193,7 @@ To install Toolbox as a binary:
|
|||||||
>
|
>
|
||||||
> ```powershell
|
> ```powershell
|
||||||
> # see releases page for other versions
|
> # see releases page for other versions
|
||||||
> $VERSION = "0.25.0"
|
> $VERSION = "0.26.0"
|
||||||
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||||
> ```
|
> ```
|
||||||
>
|
>
|
||||||
@@ -204,7 +206,7 @@ You can also install Toolbox as a container:
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.25.0
|
export VERSION=0.26.0
|
||||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -228,7 +230,7 @@ To install from source, ensure you have the latest version of
|
|||||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install github.com/googleapis/genai-toolbox@v0.25.0
|
go install github.com/googleapis/genai-toolbox@v0.26.0
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
230
cmd/root.go
230
cmd/root.go
@@ -91,6 +91,9 @@ import (
|
|||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
|
||||||
@@ -99,6 +102,7 @@ import (
|
|||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
|
||||||
@@ -243,6 +247,7 @@ import (
|
|||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
|
||||||
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
|
||||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
|
||||||
@@ -315,15 +320,15 @@ func Execute() {
|
|||||||
type Command struct {
|
type Command struct {
|
||||||
*cobra.Command
|
*cobra.Command
|
||||||
|
|
||||||
cfg server.ServerConfig
|
cfg server.ServerConfig
|
||||||
logger log.Logger
|
logger log.Logger
|
||||||
tools_file string
|
tools_file string
|
||||||
tools_files []string
|
tools_files []string
|
||||||
tools_folder string
|
tools_folder string
|
||||||
prebuiltConfig string
|
prebuiltConfigs []string
|
||||||
inStream io.Reader
|
inStream io.Reader
|
||||||
outStream io.Writer
|
outStream io.Writer
|
||||||
errStream io.Writer
|
errStream io.Writer
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewCommand returns a Command object representing an invocation of the CLI.
|
// NewCommand returns a Command object representing an invocation of the CLI.
|
||||||
@@ -376,16 +381,17 @@ func NewCommand(opts ...Option) *Command {
|
|||||||
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||||
// Fetch prebuilt tools sources to customize the help description
|
// Fetch prebuilt tools sources to customize the help description
|
||||||
prebuiltHelp := fmt.Sprintf(
|
prebuiltHelp := fmt.Sprintf(
|
||||||
"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
|
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
|
||||||
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
|
||||||
)
|
)
|
||||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
|
flags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
|
||||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||||
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
|
||||||
// TODO: Insecure by default. Might consider updating this for v1.0.0
|
// TODO: Insecure by default. Might consider updating this for v1.0.0
|
||||||
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
|
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
|
||||||
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
|
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
|
||||||
|
flags.StringSliceVar(&cmd.cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
|
||||||
|
|
||||||
// wrap RunE command so that we have access to original Command object
|
// wrap RunE command so that we have access to original Command object
|
||||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||||
@@ -425,101 +431,124 @@ func parseEnv(input string) (string, error) {
|
|||||||
return output, err
|
return output, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func convertToolsFile(ctx context.Context, raw []byte) ([]byte, error) {
|
func convertToolsFile(raw []byte) ([]byte, error) {
|
||||||
var input yaml.MapSlice
|
var input yaml.MapSlice
|
||||||
decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
|
decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
|
||||||
if err := decoder.Decode(&input); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert raw MapSlice to a helper map for quick lookup
|
|
||||||
// while keeping the values as MapSlices to preserve internal order
|
|
||||||
resourceOrder := []string{}
|
|
||||||
lookup := make(map[string]yaml.MapSlice)
|
|
||||||
for _, item := range input {
|
|
||||||
key, ok := item.Key.(string)
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
|
|
||||||
}
|
|
||||||
if slice, ok := item.Value.(yaml.MapSlice); ok {
|
|
||||||
// convert authSources to authServices
|
|
||||||
if key == "authSources" {
|
|
||||||
key = "authServices"
|
|
||||||
}
|
|
||||||
// works even if lookup[key] is nil
|
|
||||||
lookup[key] = append(lookup[key], slice...)
|
|
||||||
// preserving the resource's order of original toolsFile
|
|
||||||
if !slices.Contains(resourceOrder, key) {
|
|
||||||
resourceOrder = append(resourceOrder, key)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// toolsfile is already v2
|
|
||||||
if key == "kind" {
|
|
||||||
return raw, nil
|
|
||||||
}
|
|
||||||
return nil, fmt.Errorf("'%s' is not a map", key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// convert to tools file v2
|
// convert to tools file v2
|
||||||
var buf bytes.Buffer
|
var buf bytes.Buffer
|
||||||
encoder := yaml.NewEncoder(&buf)
|
encoder := yaml.NewEncoder(&buf)
|
||||||
for _, kind := range resourceOrder {
|
|
||||||
data, exists := lookup[kind]
|
v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
|
||||||
if !exists {
|
for {
|
||||||
// if this is skipped for all keys, the tools file is in v2
|
if err := decoder.Decode(&input); err != nil {
|
||||||
continue
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
// Transform each entry
|
for _, item := range input {
|
||||||
for _, entry := range data {
|
key, ok := item.Key.(string)
|
||||||
entryName, ok := entry.Key.(string)
|
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
|
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
|
||||||
}
|
}
|
||||||
entryBody := ProcessValue(entry.Value, kind == "toolsets")
|
// check if the key is config file v1's key
|
||||||
|
if slices.Contains(v1keys, key) {
|
||||||
transformed := yaml.MapSlice{
|
// check if value conversion to yaml.MapSlice successfully
|
||||||
{Key: "kind", Value: kind},
|
// fields such as "tools" in toolsets might pass the first check but
|
||||||
{Key: "name", Value: entryName},
|
// fail to convert to MapSlice
|
||||||
}
|
if slice, ok := item.Value.(yaml.MapSlice); ok {
|
||||||
|
// Deprecated: convert authSources to authServices
|
||||||
// Merge the transformed body into our result
|
if key == "authSources" {
|
||||||
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
|
key = "authServices"
|
||||||
transformed = append(transformed, bodySlice...)
|
}
|
||||||
|
transformed, err := transformDocs(key, slice)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// encode per-doc
|
||||||
|
for _, doc := range transformed {
|
||||||
|
if err := encoder.Encode(doc); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// invalid input will be ignored
|
||||||
|
// we don't want to throw error here since the config could
|
||||||
|
// be valid but with a different order such as:
|
||||||
|
// ---
|
||||||
|
// tools:
|
||||||
|
// - tool_a
|
||||||
|
// kind: toolsets
|
||||||
|
// ---
|
||||||
|
continue
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
|
// this doc is already v2, encode to buf
|
||||||
}
|
if err := encoder.Encode(input); err != nil {
|
||||||
|
return nil, err
|
||||||
if err := encoder.Encode(transformed); err != nil {
|
}
|
||||||
return nil, err
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return buf.Bytes(), nil
|
return buf.Bytes(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// transformDocs transforms the configuration file from v1 format to v2
|
||||||
|
// yaml.MapSlice will preserve the order in a map
|
||||||
|
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
|
||||||
|
var transformed []yaml.MapSlice
|
||||||
|
for _, entry := range input {
|
||||||
|
entryName, ok := entry.Key.(string)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
|
||||||
|
}
|
||||||
|
entryBody := ProcessValue(entry.Value, kind == "toolsets")
|
||||||
|
|
||||||
|
currentTransformed := yaml.MapSlice{
|
||||||
|
{Key: "kind", Value: kind},
|
||||||
|
{Key: "name", Value: entryName},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge the transformed body into our result
|
||||||
|
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
|
||||||
|
currentTransformed = append(currentTransformed, bodySlice...)
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
|
||||||
|
}
|
||||||
|
transformed = append(transformed, currentTransformed)
|
||||||
|
}
|
||||||
|
return transformed, nil
|
||||||
|
}
|
||||||
|
|
||||||
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
|
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
|
||||||
func ProcessValue(v any, isToolset bool) any {
|
func ProcessValue(v any, isToolset bool) any {
|
||||||
switch val := v.(type) {
|
switch val := v.(type) {
|
||||||
case yaml.MapSlice:
|
case yaml.MapSlice:
|
||||||
for i := range val {
|
// creating a new MapSlice is safer for recursive transformation
|
||||||
|
newVal := make(yaml.MapSlice, len(val))
|
||||||
|
for i, item := range val {
|
||||||
// Perform renaming
|
// Perform renaming
|
||||||
if val[i].Key == "kind" {
|
if item.Key == "kind" {
|
||||||
val[i].Key = "type"
|
item.Key = "type"
|
||||||
}
|
}
|
||||||
// Recursive call for nested values (e.g., nested objects or lists)
|
// Recursive call for nested values (e.g., nested objects or lists)
|
||||||
val[i].Value = ProcessValue(val[i].Value, false)
|
item.Value = ProcessValue(item.Value, false)
|
||||||
|
newVal[i] = item
|
||||||
}
|
}
|
||||||
return val
|
return newVal
|
||||||
case []any:
|
case []any:
|
||||||
// Process lists: If it's a toolset top-level list, wrap it.
|
// Process lists: If it's a toolset top-level list, wrap it.
|
||||||
if isToolset {
|
if isToolset {
|
||||||
return yaml.MapSlice{{Key: "tools", Value: val}}
|
return yaml.MapSlice{{Key: "tools", Value: val}}
|
||||||
}
|
}
|
||||||
// Otherwise, recurse into list items (to catch nested objects)
|
// Otherwise, recurse into list items (to catch nested objects)
|
||||||
|
newVal := make([]any, len(val))
|
||||||
for i := range val {
|
for i := range val {
|
||||||
val[i] = ProcessValue(val[i], false)
|
newVal[i] = ProcessValue(val[i], false)
|
||||||
}
|
}
|
||||||
return val
|
return newVal
|
||||||
default:
|
default:
|
||||||
return val
|
return val
|
||||||
}
|
}
|
||||||
@@ -535,7 +564,7 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
|
|||||||
}
|
}
|
||||||
raw = []byte(output)
|
raw = []byte(output)
|
||||||
|
|
||||||
raw, err = convertToolsFile(ctx, raw)
|
raw, err = convertToolsFile(raw)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
|
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
|
||||||
}
|
}
|
||||||
@@ -958,24 +987,29 @@ func run(cmd *Command) error {
|
|||||||
var allToolsFiles []ToolsFile
|
var allToolsFiles []ToolsFile
|
||||||
|
|
||||||
// Load Prebuilt Configuration
|
// Load Prebuilt Configuration
|
||||||
if cmd.prebuiltConfig != "" {
|
|
||||||
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
|
|
||||||
if err != nil {
|
|
||||||
cmd.logger.ErrorContext(ctx, err.Error())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
|
|
||||||
cmd.logger.InfoContext(ctx, logMsg)
|
|
||||||
// Append prebuilt.source to Version string for the User Agent
|
|
||||||
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
|
|
||||||
|
|
||||||
parsed, err := parseToolsFile(ctx, buf)
|
if len(cmd.prebuiltConfigs) > 0 {
|
||||||
if err != nil {
|
slices.Sort(cmd.prebuiltConfigs)
|
||||||
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
|
sourcesList := strings.Join(cmd.prebuiltConfigs, ", ")
|
||||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
|
||||||
return errMsg
|
cmd.logger.InfoContext(ctx, logMsg)
|
||||||
|
|
||||||
|
for _, configName := range cmd.prebuiltConfigs {
|
||||||
|
buf, err := prebuiltconfigs.Get(configName)
|
||||||
|
if err != nil {
|
||||||
|
cmd.logger.ErrorContext(ctx, err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse into ToolsFile struct
|
||||||
|
parsed, err := parseToolsFile(ctx, buf)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
|
||||||
|
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||||
|
return errMsg
|
||||||
|
}
|
||||||
|
allToolsFiles = append(allToolsFiles, parsed)
|
||||||
}
|
}
|
||||||
allToolsFiles = append(allToolsFiles, parsed)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine if Custom Files should be loaded
|
// Determine if Custom Files should be loaded
|
||||||
@@ -983,7 +1017,7 @@ func run(cmd *Command) error {
|
|||||||
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
|
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
|
||||||
|
|
||||||
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
|
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
|
||||||
useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
|
useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured
|
||||||
|
|
||||||
if useDefaultToolsFile {
|
if useDefaultToolsFile {
|
||||||
cmd.tools_file = "tools.yaml"
|
cmd.tools_file = "tools.yaml"
|
||||||
@@ -1033,6 +1067,18 @@ func run(cmd *Command) error {
|
|||||||
allToolsFiles = append(allToolsFiles, customTools)
|
allToolsFiles = append(allToolsFiles, customTools)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Modify version string based on loaded configurations
|
||||||
|
if len(cmd.prebuiltConfigs) > 0 {
|
||||||
|
tag := "prebuilt"
|
||||||
|
if isCustomConfigured {
|
||||||
|
tag = "custom"
|
||||||
|
}
|
||||||
|
// cmd.prebuiltConfigs is already sorted above
|
||||||
|
for _, configName := range cmd.prebuiltConfigs {
|
||||||
|
cmd.cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Merge Everything
|
// Merge Everything
|
||||||
// This will error if custom tools collide with prebuilt tools
|
// This will error if custom tools collide with prebuilt tools
|
||||||
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
|
||||||
|
|||||||
496
cmd/root_test.go
496
cmd/root_test.go
@@ -23,14 +23,12 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"reflect"
|
|
||||||
"regexp"
|
"regexp"
|
||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
yaml "github.com/goccy/go-yaml"
|
|
||||||
"github.com/google/go-cmp/cmp"
|
"github.com/google/go-cmp/cmp"
|
||||||
|
|
||||||
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
||||||
@@ -72,6 +70,9 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
|
|||||||
if c.AllowedHosts == nil {
|
if c.AllowedHosts == nil {
|
||||||
c.AllowedHosts = []string{"*"}
|
c.AllowedHosts = []string{"*"}
|
||||||
}
|
}
|
||||||
|
if c.UserAgentMetadata == nil {
|
||||||
|
c.UserAgentMetadata = []string{}
|
||||||
|
}
|
||||||
return c
|
return c
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -232,6 +233,13 @@ func TestServerConfigFlags(t *testing.T) {
|
|||||||
AllowedHosts: []string{"http://foo.com", "http://bar.com"},
|
AllowedHosts: []string{"http://foo.com", "http://bar.com"},
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "user agent metadata",
|
||||||
|
args: []string{"--user-agent-metadata", "foo,bar"},
|
||||||
|
want: withDefaults(server.ServerConfig{
|
||||||
|
UserAgentMetadata: []string{"foo", "bar"},
|
||||||
|
}),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
for _, tc := range tcs {
|
for _, tc := range tcs {
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
@@ -422,17 +430,27 @@ func TestPrebuiltFlag(t *testing.T) {
|
|||||||
tcs := []struct {
|
tcs := []struct {
|
||||||
desc string
|
desc string
|
||||||
args []string
|
args []string
|
||||||
want string
|
want []string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
desc: "default value",
|
desc: "default value",
|
||||||
args: []string{},
|
args: []string{},
|
||||||
want: "",
|
want: []string{},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "custom pre built flag",
|
desc: "single prebuilt flag",
|
||||||
args: []string{"--tools-file", "alloydb"},
|
args: []string{"--prebuilt", "alloydb"},
|
||||||
want: "alloydb",
|
want: []string{"alloydb"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "multiple prebuilt flags",
|
||||||
|
args: []string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"},
|
||||||
|
want: []string{"alloydb", "bigquery"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "comma separated prebuilt flags",
|
||||||
|
args: []string{"--prebuilt", "alloydb,bigquery"},
|
||||||
|
want: []string{"alloydb", "bigquery"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tc := range tcs {
|
for _, tc := range tcs {
|
||||||
@@ -441,8 +459,8 @@ func TestPrebuiltFlag(t *testing.T) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("unexpected error invoking command: %s", err)
|
t.Fatalf("unexpected error invoking command: %s", err)
|
||||||
}
|
}
|
||||||
if c.tools_file != tc.want {
|
if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" {
|
||||||
t.Fatalf("got %v, want %v", c.cfg, tc.want)
|
t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -497,18 +515,6 @@ func TestDefaultLogLevel(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestConvertToolsFile(t *testing.T) {
|
func TestConvertToolsFile(t *testing.T) {
|
||||||
ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
|
|
||||||
defer cancelCtx()
|
|
||||||
pr, pw := io.Pipe()
|
|
||||||
defer pw.Close()
|
|
||||||
defer pr.Close()
|
|
||||||
|
|
||||||
logger, err := log.NewStdLogger(pw, pw, "DEBUG")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("failed to setup logger %s", err)
|
|
||||||
}
|
|
||||||
ctx = util.WithLogger(ctx, logger)
|
|
||||||
|
|
||||||
tcs := []struct {
|
tcs := []struct {
|
||||||
desc string
|
desc string
|
||||||
in string
|
in string
|
||||||
@@ -537,8 +543,7 @@ func TestConvertToolsFile(t *testing.T) {
|
|||||||
kind: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -560,8 +565,7 @@ func TestConvertToolsFile(t *testing.T) {
|
|||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: some-key
|
apiKey: some-key
|
||||||
dimension: 768`,
|
dimension: 768`,
|
||||||
want: `
|
want: `kind: sources
|
||||||
kind: sources
|
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -581,8 +585,7 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -607,18 +610,18 @@ name: gemini-model
|
|||||||
type: gemini
|
type: gemini
|
||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: some-key
|
apiKey: some-key
|
||||||
dimension: 768`,
|
dimension: 768
|
||||||
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "preserve resource order with grouping",
|
desc: "preserve resource order",
|
||||||
in: `
|
in: `
|
||||||
tools:
|
tools:
|
||||||
example_tool:
|
example_tool:
|
||||||
kind: postgres-sql
|
kind: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -640,17 +643,15 @@ dimension: 768`,
|
|||||||
example_toolset:
|
example_toolset:
|
||||||
- example_tool
|
- example_tool
|
||||||
authSources:
|
authSources:
|
||||||
my-google-auth:
|
my-google-auth2:
|
||||||
kind: google
|
kind: google
|
||||||
clientId: testing-id`,
|
clientId: testing-id`,
|
||||||
want: `
|
want: `kind: tools
|
||||||
kind: tools
|
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -671,20 +672,203 @@ name: my-google-auth
|
|||||||
type: google
|
type: google
|
||||||
clientId: testing-id
|
clientId: testing-id
|
||||||
---
|
---
|
||||||
|
kind: toolsets
|
||||||
|
name: example_toolset
|
||||||
|
tools:
|
||||||
|
- example_tool
|
||||||
|
---
|
||||||
|
kind: authServices
|
||||||
|
name: my-google-auth2
|
||||||
|
type: google
|
||||||
|
clientId: testing-id
|
||||||
|
`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "convert combination of v1 and v2",
|
||||||
|
in: `
|
||||||
|
sources:
|
||||||
|
my-pg-instance:
|
||||||
|
kind: cloud-sql-postgres
|
||||||
|
project: my-project
|
||||||
|
region: my-region
|
||||||
|
instance: my-instance
|
||||||
|
database: my_db
|
||||||
|
user: my_user
|
||||||
|
password: my_pass
|
||||||
|
authServices:
|
||||||
|
my-google-auth:
|
||||||
|
kind: google
|
||||||
|
clientId: testing-id
|
||||||
|
tools:
|
||||||
|
example_tool:
|
||||||
|
kind: postgres-sql
|
||||||
|
source: my-pg-instance
|
||||||
|
description: some description
|
||||||
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
|
parameters:
|
||||||
|
- name: country
|
||||||
|
type: string
|
||||||
|
description: some description
|
||||||
|
toolsets:
|
||||||
|
example_toolset:
|
||||||
|
- example_tool
|
||||||
|
prompts:
|
||||||
|
code_review:
|
||||||
|
description: ask llm to analyze code quality
|
||||||
|
messages:
|
||||||
|
- content: "please review the following code for quality: {{.code}}"
|
||||||
|
arguments:
|
||||||
|
- name: code
|
||||||
|
description: the code to review
|
||||||
|
embeddingModels:
|
||||||
|
gemini-model:
|
||||||
|
kind: gemini
|
||||||
|
model: gemini-embedding-001
|
||||||
|
apiKey: some-key
|
||||||
|
dimension: 768
|
||||||
|
---
|
||||||
|
kind: sources
|
||||||
|
name: my-pg-instance2
|
||||||
|
type: cloud-sql-postgres
|
||||||
|
project: my-project
|
||||||
|
region: my-region
|
||||||
|
instance: my-instance
|
||||||
|
---
|
||||||
|
kind: authServices
|
||||||
|
name: my-google-auth2
|
||||||
|
type: google
|
||||||
|
clientId: testing-id
|
||||||
|
---
|
||||||
|
kind: tools
|
||||||
|
name: example_tool2
|
||||||
|
type: postgres-sql
|
||||||
|
source: my-pg-instance
|
||||||
|
description: some description
|
||||||
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
|
parameters:
|
||||||
|
- name: country
|
||||||
|
type: string
|
||||||
|
description: some description
|
||||||
|
---
|
||||||
|
kind: toolsets
|
||||||
|
name: example_toolset2
|
||||||
|
tools:
|
||||||
|
- example_tool
|
||||||
|
---
|
||||||
|
tools:
|
||||||
|
- example_tool
|
||||||
|
kind: toolsets
|
||||||
|
name: example_toolset3
|
||||||
|
---
|
||||||
|
kind: prompts
|
||||||
|
name: code_review2
|
||||||
|
description: ask llm to analyze code quality
|
||||||
|
messages:
|
||||||
|
- content: "please review the following code for quality: {{.code}}"
|
||||||
|
arguments:
|
||||||
|
- name: code
|
||||||
|
description: the code to review
|
||||||
|
---
|
||||||
|
kind: embeddingModels
|
||||||
|
name: gemini-model2
|
||||||
|
type: gemini`,
|
||||||
|
want: `kind: sources
|
||||||
|
name: my-pg-instance
|
||||||
|
type: cloud-sql-postgres
|
||||||
|
project: my-project
|
||||||
|
region: my-region
|
||||||
|
instance: my-instance
|
||||||
|
database: my_db
|
||||||
|
user: my_user
|
||||||
|
password: my_pass
|
||||||
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-auth
|
name: my-google-auth
|
||||||
type: google
|
type: google
|
||||||
clientId: testing-id
|
clientId: testing-id
|
||||||
---
|
---
|
||||||
|
kind: tools
|
||||||
|
name: example_tool
|
||||||
|
type: postgres-sql
|
||||||
|
source: my-pg-instance
|
||||||
|
description: some description
|
||||||
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
|
parameters:
|
||||||
|
- name: country
|
||||||
|
type: string
|
||||||
|
description: some description
|
||||||
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool`,
|
- example_tool
|
||||||
|
---
|
||||||
|
kind: prompts
|
||||||
|
name: code_review
|
||||||
|
description: ask llm to analyze code quality
|
||||||
|
messages:
|
||||||
|
- content: "please review the following code for quality: {{.code}}"
|
||||||
|
arguments:
|
||||||
|
- name: code
|
||||||
|
description: the code to review
|
||||||
|
---
|
||||||
|
kind: embeddingModels
|
||||||
|
name: gemini-model
|
||||||
|
type: gemini
|
||||||
|
model: gemini-embedding-001
|
||||||
|
apiKey: some-key
|
||||||
|
dimension: 768
|
||||||
|
---
|
||||||
|
kind: sources
|
||||||
|
name: my-pg-instance2
|
||||||
|
type: cloud-sql-postgres
|
||||||
|
project: my-project
|
||||||
|
region: my-region
|
||||||
|
instance: my-instance
|
||||||
|
---
|
||||||
|
kind: authServices
|
||||||
|
name: my-google-auth2
|
||||||
|
type: google
|
||||||
|
clientId: testing-id
|
||||||
|
---
|
||||||
|
kind: tools
|
||||||
|
name: example_tool2
|
||||||
|
type: postgres-sql
|
||||||
|
source: my-pg-instance
|
||||||
|
description: some description
|
||||||
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
|
parameters:
|
||||||
|
- name: country
|
||||||
|
type: string
|
||||||
|
description: some description
|
||||||
|
---
|
||||||
|
kind: toolsets
|
||||||
|
name: example_toolset2
|
||||||
|
tools:
|
||||||
|
- example_tool
|
||||||
|
---
|
||||||
|
tools:
|
||||||
|
- example_tool
|
||||||
|
kind: toolsets
|
||||||
|
name: example_toolset3
|
||||||
|
---
|
||||||
|
kind: prompts
|
||||||
|
name: code_review2
|
||||||
|
description: ask llm to analyze code quality
|
||||||
|
messages:
|
||||||
|
- content: "please review the following code for quality: {{.code}}"
|
||||||
|
arguments:
|
||||||
|
- name: code
|
||||||
|
description: the code to review
|
||||||
|
---
|
||||||
|
kind: embeddingModels
|
||||||
|
name: gemini-model2
|
||||||
|
type: gemini
|
||||||
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "no convertion needed",
|
desc: "no convertion needed",
|
||||||
in: `
|
in: `kind: sources
|
||||||
kind: sources
|
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -699,8 +883,7 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -710,8 +893,7 @@ kind: toolsets
|
|||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool`,
|
- example_tool`,
|
||||||
want: `
|
want: `kind: sources
|
||||||
kind: sources
|
|
||||||
name: my-pg-instance
|
name: my-pg-instance
|
||||||
type: cloud-sql-postgres
|
type: cloud-sql-postgres
|
||||||
project: my-project
|
project: my-project
|
||||||
@@ -726,8 +908,7 @@ name: example_tool
|
|||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
source: my-pg-instance
|
source: my-pg-instance
|
||||||
description: some description
|
description: some description
|
||||||
statement: |
|
statement: SELECT * FROM SQL_STATEMENT;
|
||||||
SELECT * FROM SQL_STATEMENT;
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: country
|
- name: country
|
||||||
type: string
|
type: string
|
||||||
@@ -736,69 +917,34 @@ parameters:
|
|||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
- example_tool`,
|
- example_tool
|
||||||
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid source",
|
desc: "invalid source",
|
||||||
in: `sources: invalid`,
|
in: `sources: invalid`,
|
||||||
isErr: true,
|
want: "",
|
||||||
errStr: "'sources' is not a map",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid toolset",
|
desc: "invalid toolset",
|
||||||
in: `toolsets: invalid`,
|
in: `toolsets: invalid`,
|
||||||
isErr: true,
|
want: "",
|
||||||
errStr: "'toolsets' is not a map",
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tc := range tcs {
|
for _, tc := range tcs {
|
||||||
t.Run(tc.desc, func(t *testing.T) {
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
output, err := convertToolsFile(ctx, []byte(tc.in))
|
output, err := convertToolsFile([]byte(tc.in))
|
||||||
if tc.isErr {
|
|
||||||
if err == nil {
|
|
||||||
t.Fatalf("missing error: %s", tc.errStr)
|
|
||||||
}
|
|
||||||
if err.Error() != tc.errStr {
|
|
||||||
t.Fatalf("invalid error string: got %s, want %s", err, tc.errStr)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("unexpected error: %s", err)
|
t.Fatalf("unexpected error: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var docs1, docs2 []yaml.MapSlice
|
if diff := cmp.Diff(string(output), tc.want); diff != "" {
|
||||||
if docs1, err = decodeToMapSlice(string(output)); err != nil {
|
t.Fatalf("incorrect toolsets parse: diff %v", diff)
|
||||||
t.Fatalf("error decoding output: %s", err)
|
|
||||||
}
|
|
||||||
if docs2, err = decodeToMapSlice(tc.want); err != nil {
|
|
||||||
t.Fatalf("Error decoding want: %s", err)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(docs1, docs2) {
|
|
||||||
t.Fatalf("incorrect output: got %s, want %s", string(output), tc.want)
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func decodeToMapSlice(data string) ([]yaml.MapSlice, error) {
|
|
||||||
// ensures that the order is correct
|
|
||||||
var docs []yaml.MapSlice
|
|
||||||
decoder := yaml.NewDecoder(strings.NewReader(data))
|
|
||||||
for {
|
|
||||||
var doc yaml.MapSlice
|
|
||||||
err := decoder.Decode(&doc)
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
docs = append(docs, doc)
|
|
||||||
}
|
|
||||||
return docs, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestParseToolFile(t *testing.T) {
|
func TestParseToolFile(t *testing.T) {
|
||||||
ctx, err := testutils.ContextWithNewLogger()
|
ctx, err := testutils.ContextWithNewLogger()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -869,7 +1015,8 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
ToolNames: []string{"example_tool"},
|
ToolNames: []string{"example_tool"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Prompts: nil,
|
AuthServices: nil,
|
||||||
|
Prompts: nil,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -973,7 +1120,7 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
Prompts: server.PromptConfigs{
|
Prompts: server.PromptConfigs{
|
||||||
"code_review": custom.Config{
|
"code_review": &custom.Config{
|
||||||
Name: "code_review",
|
Name: "code_review",
|
||||||
Description: "ask llm to analyze code quality",
|
Description: "ask llm to analyze code quality",
|
||||||
Arguments: prompts.Arguments{
|
Arguments: prompts.Arguments{
|
||||||
@@ -991,12 +1138,12 @@ func TestParseToolFile(t *testing.T) {
|
|||||||
in: `
|
in: `
|
||||||
kind: prompts
|
kind: prompts
|
||||||
name: my-prompt
|
name: my-prompt
|
||||||
description: A prompt template for data analysis.
|
description: A prompt template for data analysis.
|
||||||
arguments:
|
arguments:
|
||||||
- name: country
|
- name: country
|
||||||
description: The country to analyze.
|
description: The country to analyze.
|
||||||
messages:
|
messages:
|
||||||
- content: Analyze the data for {{.country}}.
|
- content: Analyze the data for {{.country}}.
|
||||||
`,
|
`,
|
||||||
wantToolsFile: ToolsFile{
|
wantToolsFile: ToolsFile{
|
||||||
Sources: nil,
|
Sources: nil,
|
||||||
@@ -1066,17 +1213,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
database: my_db
|
database: my_db
|
||||||
user: my_user
|
user: my_user
|
||||||
password: my_pass
|
password: my_pass
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: my-client-id
|
clientId: my-client-id
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: other-client-id
|
clientId: other-client-id
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
@@ -1102,7 +1249,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
field: email
|
field: email
|
||||||
- name: other-google-service
|
- name: other-google-service
|
||||||
field: other_email
|
field: other_email
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
@@ -1270,17 +1417,17 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
database: my_db
|
database: my_db
|
||||||
user: my_user
|
user: my_user
|
||||||
password: my_pass
|
password: my_pass
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: my-client-id
|
clientId: my-client-id
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: other-client-id
|
clientId: other-client-id
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: postgres-sql
|
type: postgres-sql
|
||||||
@@ -1308,7 +1455,7 @@ func TestParseToolFileWithAuth(t *testing.T) {
|
|||||||
field: email
|
field: email
|
||||||
- name: other-google-service
|
- name: other-google-service
|
||||||
field: other_email
|
field: other_email
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: example_toolset
|
name: example_toolset
|
||||||
tools:
|
tools:
|
||||||
@@ -1567,17 +1714,17 @@ func TestEnvVarReplacement(t *testing.T) {
|
|||||||
Authorization: ${TestHeader}
|
Authorization: ${TestHeader}
|
||||||
queryParams:
|
queryParams:
|
||||||
api-key: ${API_KEY}
|
api-key: ${API_KEY}
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: my-google-service
|
name: my-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: ${clientId}
|
clientId: ${clientId}
|
||||||
---
|
---
|
||||||
kind: authServices
|
kind: authServices
|
||||||
name: other-google-service
|
name: other-google-service
|
||||||
type: google
|
type: google
|
||||||
clientId: ${clientId2}
|
clientId: ${clientId2}
|
||||||
---
|
---
|
||||||
kind: tools
|
kind: tools
|
||||||
name: example_tool
|
name: example_tool
|
||||||
type: http
|
type: http
|
||||||
@@ -1618,12 +1765,12 @@ func TestEnvVarReplacement(t *testing.T) {
|
|||||||
- name: Language
|
- name: Language
|
||||||
type: string
|
type: string
|
||||||
description: language string
|
description: language string
|
||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: ${toolset_name}
|
name: ${toolset_name}
|
||||||
tools:
|
tools:
|
||||||
- example_tool
|
- example_tool
|
||||||
---
|
---
|
||||||
kind: prompts
|
kind: prompts
|
||||||
name: ${prompt_name}
|
name: ${prompt_name}
|
||||||
description: A test prompt for {{.name}}.
|
description: A test prompt for {{.name}}.
|
||||||
@@ -2066,7 +2213,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_postgres_admin_tools",
|
Name: "cloud_sql_postgres_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2076,7 +2223,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_mysql_admin_tools",
|
Name: "cloud_sql_mysql_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2086,7 +2233,7 @@ func TestPrebuiltTools(t *testing.T) {
|
|||||||
wantToolset: server.ToolsetConfigs{
|
wantToolset: server.ToolsetConfigs{
|
||||||
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
|
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
|
||||||
Name: "cloud_sql_mssql_admin_tools",
|
Name: "cloud_sql_mssql_admin_tools",
|
||||||
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
|
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2601,6 +2748,7 @@ description: "Dummy"
|
|||||||
---
|
---
|
||||||
kind: toolsets
|
kind: toolsets
|
||||||
name: sqlite_database_tools
|
name: sqlite_database_tools
|
||||||
|
tools:
|
||||||
- dummy_tool
|
- dummy_tool
|
||||||
`
|
`
|
||||||
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
|
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
|
||||||
@@ -2641,6 +2789,12 @@ authSources:
|
|||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "sqlite called twice error",
|
||||||
|
args: []string{"--prebuilt", "sqlite", "--prebuilt", "sqlite"},
|
||||||
|
wantErr: true,
|
||||||
|
errString: "resource conflicts detected",
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "tool conflict error",
|
desc: "tool conflict error",
|
||||||
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
|
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
|
||||||
@@ -2749,3 +2903,115 @@ func TestDefaultToolsFileBehavior(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParameterReferenceValidation(t *testing.T) {
|
||||||
|
ctx, err := testutils.ContextWithNewLogger()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base template
|
||||||
|
baseYaml := `
|
||||||
|
sources:
|
||||||
|
dummy-source:
|
||||||
|
kind: http
|
||||||
|
baseUrl: http://example.com
|
||||||
|
tools:
|
||||||
|
test-tool:
|
||||||
|
kind: postgres-sql
|
||||||
|
source: dummy-source
|
||||||
|
description: test tool
|
||||||
|
statement: SELECT 1;
|
||||||
|
parameters:
|
||||||
|
%s`
|
||||||
|
|
||||||
|
tcs := []struct {
|
||||||
|
desc string
|
||||||
|
params string
|
||||||
|
wantErr bool
|
||||||
|
errSubstr string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "valid backward reference",
|
||||||
|
params: `
|
||||||
|
- name: source_param
|
||||||
|
type: string
|
||||||
|
description: source
|
||||||
|
- name: copy_param
|
||||||
|
type: string
|
||||||
|
description: copy
|
||||||
|
valueFromParam: source_param`,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "valid forward reference (out of order)",
|
||||||
|
params: `
|
||||||
|
- name: copy_param
|
||||||
|
type: string
|
||||||
|
description: copy
|
||||||
|
valueFromParam: source_param
|
||||||
|
- name: source_param
|
||||||
|
type: string
|
||||||
|
description: source`,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "invalid missing reference",
|
||||||
|
params: `
|
||||||
|
- name: copy_param
|
||||||
|
type: string
|
||||||
|
description: copy
|
||||||
|
valueFromParam: non_existent_param`,
|
||||||
|
wantErr: true,
|
||||||
|
errSubstr: "references '\"non_existent_param\"' in the 'valueFromParam' field",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "invalid self reference",
|
||||||
|
params: `
|
||||||
|
- name: myself
|
||||||
|
type: string
|
||||||
|
description: self
|
||||||
|
valueFromParam: myself`,
|
||||||
|
wantErr: true,
|
||||||
|
errSubstr: "parameter \"myself\" cannot copy value from itself",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "multiple valid references",
|
||||||
|
params: `
|
||||||
|
- name: a
|
||||||
|
type: string
|
||||||
|
description: a
|
||||||
|
- name: b
|
||||||
|
type: string
|
||||||
|
description: b
|
||||||
|
valueFromParam: a
|
||||||
|
- name: c
|
||||||
|
type: string
|
||||||
|
description: c
|
||||||
|
valueFromParam: a`,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range tcs {
|
||||||
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
|
// Indent parameters to match YAML structure
|
||||||
|
yamlContent := fmt.Sprintf(baseYaml, tc.params)
|
||||||
|
|
||||||
|
_, err := parseToolsFile(ctx, []byte(yamlContent))
|
||||||
|
|
||||||
|
if tc.wantErr {
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error, got nil")
|
||||||
|
}
|
||||||
|
if !strings.Contains(err.Error(), tc.errSubstr) {
|
||||||
|
t.Errorf("error %q does not contain expected substring %q", err.Error(), tc.errSubstr)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
0.25.0
|
0.26.0
|
||||||
|
|||||||
@@ -234,7 +234,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.25.0\" # x-release-please-version\n",
|
"version = \"0.26.0\" # x-release-please-version\n",
|
||||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
@@ -520,8 +520,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"! pip install toolbox-core --quiet\n",
|
"! pip install google-adk[toolbox] --quiet"
|
||||||
"! pip install google-adk --quiet"
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -536,14 +535,18 @@
|
|||||||
"from google.adk.runners import Runner\n",
|
"from google.adk.runners import Runner\n",
|
||||||
"from google.adk.sessions import InMemorySessionService\n",
|
"from google.adk.sessions import InMemorySessionService\n",
|
||||||
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
"from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService\n",
|
||||||
|
"from google.adk.tools.toolbox_toolset import ToolboxToolset\n",
|
||||||
"from google.genai import types\n",
|
"from google.genai import types\n",
|
||||||
"from toolbox_core import ToolboxSyncClient\n",
|
|
||||||
"\n",
|
"\n",
|
||||||
"import os\n",
|
"import os\n",
|
||||||
"# TODO(developer): replace this with your Google API key\n",
|
"# TODO(developer): replace this with your Google API key\n",
|
||||||
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
"os.environ['GOOGLE_API_KEY'] = \"<GOOGLE_API_KEY>\"\n",
|
||||||
"\n",
|
"\n",
|
||||||
"toolbox_client = ToolboxSyncClient(\"http://127.0.0.1:5000\")\n",
|
"# Configure toolset\n",
|
||||||
|
"toolset = ToolboxToolset(\n",
|
||||||
|
" server_url=\"http://127.0.0.1:5000\",\n",
|
||||||
|
" toolset_name=\"my-toolset\"\n",
|
||||||
|
")\n",
|
||||||
"\n",
|
"\n",
|
||||||
"prompt = \"\"\"\n",
|
"prompt = \"\"\"\n",
|
||||||
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
" You're a helpful hotel assistant. You handle hotel searching, booking and\n",
|
||||||
@@ -560,7 +563,7 @@
|
|||||||
" name='hotel_agent',\n",
|
" name='hotel_agent',\n",
|
||||||
" description='A helpful AI assistant.',\n",
|
" description='A helpful AI assistant.',\n",
|
||||||
" instruction=prompt,\n",
|
" instruction=prompt,\n",
|
||||||
" tools=toolbox_client.load_toolset(\"my-toolset\"),\n",
|
" tools=[toolset],\n",
|
||||||
")\n",
|
")\n",
|
||||||
"\n",
|
"\n",
|
||||||
"session_service = InMemorySessionService()\n",
|
"session_service = InMemorySessionService()\n",
|
||||||
|
|||||||
@@ -16,6 +16,12 @@ Databases” as its initial development predated MCP, but was renamed to align
|
|||||||
with recently added MCP compatibility.
|
with recently added MCP compatibility.
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
|
{{< notice note >}}
|
||||||
|
This document has been updated to support the configuration file v2 format. To
|
||||||
|
view documentation with configuration file v1 format, please navigate to the
|
||||||
|
top-right menu and select versions v0.26.0 or older.
|
||||||
|
{{< /notice >}}
|
||||||
|
|
||||||
## Why Toolbox?
|
## Why Toolbox?
|
||||||
|
|
||||||
Toolbox helps you build Gen AI tools that let your agents access data in your
|
Toolbox helps you build Gen AI tools that let your agents access data in your
|
||||||
@@ -71,7 +77,7 @@ redeploying your application.
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### (Non-production) Running Toolbox
|
### Quickstart: Running Toolbox using NPX
|
||||||
|
|
||||||
You can run Toolbox directly with a [configuration file](../configure.md):
|
You can run Toolbox directly with a [configuration file](../configure.md):
|
||||||
|
|
||||||
@@ -103,7 +109,7 @@ To install Toolbox as a binary on Linux (AMD64):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.25.0
|
export VERSION=0.26.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -114,7 +120,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.25.0
|
export VERSION=0.26.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -125,7 +131,7 @@ To install Toolbox as a binary on macOS (Intel):
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.25.0
|
export VERSION=0.26.0
|
||||||
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
|
||||||
chmod +x toolbox
|
chmod +x toolbox
|
||||||
```
|
```
|
||||||
@@ -136,7 +142,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
|
|||||||
|
|
||||||
```cmd
|
```cmd
|
||||||
:: see releases page for other versions
|
:: see releases page for other versions
|
||||||
set VERSION=0.25.0
|
set VERSION=0.26.0
|
||||||
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -146,7 +152,7 @@ To install Toolbox as a binary on Windows (PowerShell):
|
|||||||
|
|
||||||
```powershell
|
```powershell
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
$VERSION = "0.25.0"
|
$VERSION = "0.26.0"
|
||||||
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -158,7 +164,7 @@ You can also install Toolbox as a container:
|
|||||||
|
|
||||||
```sh
|
```sh
|
||||||
# see releases page for other versions
|
# see releases page for other versions
|
||||||
export VERSION=0.25.0
|
export VERSION=0.26.0
|
||||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -177,7 +183,7 @@ To install from source, ensure you have the latest version of
|
|||||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
go install github.com/googleapis/genai-toolbox@v0.25.0
|
go install github.com/googleapis/genai-toolbox@v0.26.0
|
||||||
```
|
```
|
||||||
|
|
||||||
{{% /tab %}}
|
{{% /tab %}}
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ runtime](https://research.google.com/colaboratory/local-runtimes.html).
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install toolbox-core
|
pip install google-adk[toolbox]
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
@@ -73,7 +73,7 @@ pip install toolbox-core
|
|||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install google-adk
|
# No other dependencies required for ADK
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="Langchain" lang="bash" >}}
|
{{< tab header="Langchain" lang="bash" >}}
|
||||||
|
|
||||||
|
|||||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -24,12 +24,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@dabh/diagnostics": {
|
"node_modules/@dabh/diagnostics": {
|
||||||
"version": "2.0.3",
|
"version": "2.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz",
|
||||||
"integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==",
|
"integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"colorspace": "1.1.x",
|
"@so-ric/colorspace": "^1.1.6",
|
||||||
"enabled": "2.0.x",
|
"enabled": "2.0.x",
|
||||||
"kuler": "^2.0.0"
|
"kuler": "^2.0.0"
|
||||||
}
|
}
|
||||||
@@ -578,9 +579,10 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@google-cloud/firestore": {
|
"node_modules/@google-cloud/firestore": {
|
||||||
"version": "7.11.3",
|
"version": "7.11.6",
|
||||||
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.3.tgz",
|
"resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz",
|
||||||
"integrity": "sha512-qsM3/WHpawF07SRVvEJJVRwhYzM7o9qtuksyuqnrMig6fxIrwWnsezECWsG/D5TyYru51Fv5c/RTqNDQ2yU+4w==",
|
"integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -2887,6 +2889,17 @@
|
|||||||
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
|
||||||
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
|
||||||
},
|
},
|
||||||
|
"node_modules/@so-ric/colorspace": {
|
||||||
|
"version": "1.1.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz",
|
||||||
|
"integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"color": "^5.0.2",
|
||||||
|
"text-hex": "1.0.x"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@toolbox-sdk/core": {
|
"node_modules/@toolbox-sdk/core": {
|
||||||
"version": "0.1.2",
|
"version": "0.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/@toolbox-sdk/core/-/core-0.1.2.tgz",
|
||||||
@@ -3515,38 +3528,53 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color": {
|
"node_modules/color": {
|
||||||
"version": "3.2.1",
|
"version": "5.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz",
|
||||||
"integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
|
"integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-convert": "^1.9.3",
|
"color-convert": "^3.1.3",
|
||||||
"color-string": "^1.6.0"
|
"color-string": "^2.1.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-convert": {
|
"node_modules/color-convert": {
|
||||||
"version": "1.9.3",
|
"version": "3.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz",
|
||||||
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
"integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "1.1.3"
|
"color-name": "^2.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-name": {
|
"node_modules/color-name": {
|
||||||
"version": "1.1.3",
|
"version": "2.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
|
||||||
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
|
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
|
||||||
"optional": true
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12.20"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"node_modules/color-string": {
|
"node_modules/color-string": {
|
||||||
"version": "1.9.1",
|
"version": "2.1.4",
|
||||||
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz",
|
||||||
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
|
"integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "^1.0.0",
|
"color-name": "^2.0.0"
|
||||||
"simple-swizzle": "^0.2.2"
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/colorette": {
|
"node_modules/colorette": {
|
||||||
@@ -3554,16 +3582,6 @@
|
|||||||
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
"resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
|
||||||
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
|
||||||
},
|
},
|
||||||
"node_modules/colorspace": {
|
|
||||||
"version": "1.1.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz",
|
|
||||||
"integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==",
|
|
||||||
"optional": true,
|
|
||||||
"dependencies": {
|
|
||||||
"color": "^3.1.3",
|
|
||||||
"text-hex": "1.0.x"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/combined-stream": {
|
"node_modules/combined-stream": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
@@ -4968,12 +4986,6 @@
|
|||||||
"node": ">= 0.10"
|
"node": ">= 0.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/is-arrayish": {
|
|
||||||
"version": "0.3.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
|
|
||||||
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
|
|
||||||
"optional": true
|
|
||||||
},
|
|
||||||
"node_modules/is-core-module": {
|
"node_modules/is-core-module": {
|
||||||
"version": "2.16.1",
|
"version": "2.16.1",
|
||||||
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
|
||||||
@@ -5114,13 +5126,14 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jsonwebtoken/node_modules/jws": {
|
"node_modules/jsonwebtoken/node_modules/jws": {
|
||||||
"version": "3.2.2",
|
"version": "3.2.3",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-3.2.3.tgz",
|
||||||
"integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==",
|
"integrity": "sha512-byiJ0FLRdLdSVSReO/U4E7RoEyOCKnEnEPMjq3HxWtvzLsV08/i5RQKsFVNkCldrCaPr2vDNAOMsfs8T/Hze7g==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^1.4.1",
|
"jwa": "^1.4.2",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5153,11 +5166,12 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jws": {
|
"node_modules/jws": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
||||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
||||||
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jwa": "^2.0.0",
|
"jwa": "^2.0.1",
|
||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -5424,9 +5438,10 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/node-forge": {
|
"node_modules/node-forge": {
|
||||||
"version": "1.3.1",
|
"version": "1.3.3",
|
||||||
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz",
|
||||||
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
|
"integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==",
|
||||||
|
"license": "(BSD-3-Clause OR GPL-2.0)",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"peer": true,
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
@@ -6038,15 +6053,6 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/simple-swizzle": {
|
|
||||||
"version": "0.2.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
|
|
||||||
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
|
|
||||||
"optional": true,
|
|
||||||
"dependencies": {
|
|
||||||
"is-arrayish": "^0.3.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/source-map": {
|
"node_modules/source-map": {
|
||||||
"version": "0.6.1",
|
"version": "0.6.1",
|
||||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||||
@@ -6233,6 +6239,7 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
||||||
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true
|
"optional": true
|
||||||
},
|
},
|
||||||
"node_modules/thriftrw": {
|
"node_modules/thriftrw": {
|
||||||
@@ -6416,13 +6423,14 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/winston": {
|
"node_modules/winston": {
|
||||||
"version": "3.17.0",
|
"version": "3.19.0",
|
||||||
"resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz",
|
"resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz",
|
||||||
"integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==",
|
"integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==",
|
||||||
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@colors/colors": "^1.6.0",
|
"@colors/colors": "^1.6.0",
|
||||||
"@dabh/diagnostics": "^2.0.2",
|
"@dabh/diagnostics": "^2.0.8",
|
||||||
"async": "^3.2.3",
|
"async": "^3.2.3",
|
||||||
"is-stream": "^2.0.0",
|
"is-stream": "^2.0.0",
|
||||||
"logform": "^2.7.0",
|
"logform": "^2.7.0",
|
||||||
|
|||||||
@@ -975,9 +975,10 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/lodash": {
|
"node_modules/lodash": {
|
||||||
"version": "4.17.21",
|
"version": "4.17.23",
|
||||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
|
||||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
|
||||||
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/magic-bytes.js": {
|
"node_modules/magic-bytes.js": {
|
||||||
"version": "1.12.1",
|
"version": "1.12.1",
|
||||||
|
|||||||
@@ -1,15 +1,17 @@
|
|||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from toolbox_core import ToolboxSyncClient
|
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
||||||
|
|
||||||
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
|
||||||
client = ToolboxSyncClient("http://127.0.0.1:5000")
|
toolset = ToolboxToolset(
|
||||||
|
server_url="http://127.0.0.1:5000",
|
||||||
|
)
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='root_agent',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=client.load_toolset(),
|
tools=[toolset],
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
|
|||||||
@@ -1,3 +1,2 @@
|
|||||||
google-adk==1.21.0
|
google-adk[toolbox]==1.23.0
|
||||||
toolbox-core==0.5.4
|
|
||||||
pytest==9.0.2
|
pytest==9.0.2
|
||||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -54,6 +54,7 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -301,6 +302,7 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
|
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,6 +54,7 @@ database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -301,6 +302,7 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
|
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -54,6 +54,7 @@ instance, database and users:
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
|
|
||||||
## Install MCP Toolbox
|
## Install MCP Toolbox
|
||||||
|
|
||||||
@@ -301,6 +302,7 @@ instances and interacting with your database:
|
|||||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
||||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||||
|
* **restore_backup**: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
{{< notice note >}}
|
{{< notice note >}}
|
||||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||||
|
|||||||
@@ -100,19 +100,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
|
|||||||
|
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -45,19 +45,19 @@ instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
{{< tabpane persist=header >}}
|
{{< tabpane persist=header >}}
|
||||||
{{< tab header="linux/amd64" lang="bash" >}}
|
{{< tab header="linux/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/linux/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/arm64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/darwin/amd64/toolbox
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< tab header="windows/amd64" lang="bash" >}}
|
{{< tab header="windows/amd64" lang="bash" >}}
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/windows/amd64/toolbox.exe
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ The native SDKs can be combined with MCP clients in many cases.
|
|||||||
|
|
||||||
Toolbox currently supports the following versions of MCP specification:
|
Toolbox currently supports the following versions of MCP specification:
|
||||||
|
|
||||||
|
* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
|
||||||
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
||||||
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
||||||
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
||||||
|
|||||||
@@ -46,10 +46,10 @@ with the necessary configuration for deployment to Vertex AI Agent Engine.
|
|||||||
process will generate deployment configuration files (like a `Makefile` and
|
process will generate deployment configuration files (like a `Makefile` and
|
||||||
`Dockerfile`) in your project directory.
|
`Dockerfile`) in your project directory.
|
||||||
|
|
||||||
4. Add `toolbox-core` as a dependency to the new project:
|
4. Add `google-adk[toolbox]` as a dependency to the new project:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
uv add toolbox-core
|
uv add google-adk[toolbox]
|
||||||
```
|
```
|
||||||
|
|
||||||
## Step 3: Configure Google Cloud Authentication
|
## Step 3: Configure Google Cloud Authentication
|
||||||
@@ -95,22 +95,23 @@ authentication token.
|
|||||||
```python
|
```python
|
||||||
from google.adk import Agent
|
from google.adk import Agent
|
||||||
from google.adk.apps import App
|
from google.adk.apps import App
|
||||||
from toolbox_core import ToolboxSyncClient, auth_methods
|
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
||||||
|
from toolbox_adk import CredentialStrategy
|
||||||
|
|
||||||
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
|
||||||
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
|
||||||
|
|
||||||
# Initialize the client with the Cloud Run URL and Auth headers
|
# Initialize the toolset with Workload Identity (generates ID token for the URL)
|
||||||
client = ToolboxSyncClient(
|
toolset = ToolboxToolset(
|
||||||
TOOLBOX_URL,
|
server_url=TOOLBOX_URL,
|
||||||
client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
|
credentials=CredentialStrategy.workload_identity(target_audience=TOOLBOX_URL)
|
||||||
)
|
)
|
||||||
|
|
||||||
root_agent = Agent(
|
root_agent = Agent(
|
||||||
name='root_agent',
|
name='root_agent',
|
||||||
model='gemini-2.5-flash',
|
model='gemini-2.5-flash',
|
||||||
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
|
||||||
tools=client.load_toolset(),
|
tools=[toolset],
|
||||||
)
|
)
|
||||||
|
|
||||||
app = App(root_agent=root_agent, name="my_agent")
|
app = App(root_agent=root_agent, name="my_agent")
|
||||||
|
|||||||
@@ -207,6 +207,7 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
|
|||||||
{{< tab header="Python" lang="python" >}}
|
{{< tab header="Python" lang="python" >}}
|
||||||
import asyncio
|
import asyncio
|
||||||
from toolbox_core import ToolboxClient, auth_methods
|
from toolbox_core import ToolboxClient, auth_methods
|
||||||
|
from toolbox_core.protocol import Protocol
|
||||||
|
|
||||||
# Replace with the Cloud Run service URL generated in the previous step
|
# Replace with the Cloud Run service URL generated in the previous step
|
||||||
URL = "https://cloud-run-url.app"
|
URL = "https://cloud-run-url.app"
|
||||||
@@ -217,6 +218,7 @@ async def main():
|
|||||||
async with ToolboxClient(
|
async with ToolboxClient(
|
||||||
URL,
|
URL,
|
||||||
client_headers={"Authorization": auth_token_provider},
|
client_headers={"Authorization": auth_token_provider},
|
||||||
|
protocol=Protocol.TOOLBOX,
|
||||||
) as toolbox:
|
) as toolbox:
|
||||||
toolset = await toolbox.load_toolset()
|
toolset = await toolbox.load_toolset()
|
||||||
# ...
|
# ...
|
||||||
@@ -281,3 +283,5 @@ contain the specific error message needed to diagnose the problem.
|
|||||||
Manager, it means the Toolbox service account is missing permissions.
|
Manager, it means the Toolbox service account is missing permissions.
|
||||||
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
- Ensure the `toolbox-identity` service account has the **Secret Manager
|
||||||
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
|
||||||
|
|
||||||
|
- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.
|
||||||
@@ -16,7 +16,7 @@ description: >
|
|||||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||||
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
| | `--prebuilt` | Use one or more prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||||
@@ -27,6 +27,7 @@ description: >
|
|||||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
|
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
|
||||||
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
||||||
|
| | `--user-agent-metadata` | Appends additional metadata to the User-Agent. | |
|
||||||
| `-v` | `--version` | version for toolbox | |
|
| `-v` | `--version` | version for toolbox | |
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
@@ -50,6 +51,11 @@ description: >
|
|||||||
|
|
||||||
# Server with prebuilt + custom tools configurations
|
# Server with prebuilt + custom tools configurations
|
||||||
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
|
||||||
|
|
||||||
|
# Server with multiple prebuilt tools configurations
|
||||||
|
./toolbox --prebuilt alloydb-postgres,alloydb-postgres-admin
|
||||||
|
# OR
|
||||||
|
./toolbox --prebuilt alloydb-postgres --prebuilt alloydb-postgres-admin
|
||||||
```
|
```
|
||||||
|
|
||||||
### Tool Configuration Sources
|
### Tool Configuration Sources
|
||||||
@@ -70,7 +76,7 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
|
|||||||
|
|
||||||
**Prebuilt Configurations:**
|
**Prebuilt Configurations:**
|
||||||
|
|
||||||
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
|
- `--prebuilt`: Use one or more predefined configurations for specific database types (e.g.,
|
||||||
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
|
||||||
Reference](prebuilt-tools.md) for allowed values.
|
Reference](prebuilt-tools.md) for allowed values.
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,9 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
|
|||||||
{{< notice tip >}}
|
{{< notice tip >}}
|
||||||
You can now use `--prebuilt` along `--tools-file`, `--tools-files`, or
|
You can now use `--prebuilt` along `--tools-file`, `--tools-files`, or
|
||||||
`--tools-folder` to combine prebuilt configs with custom tools.
|
`--tools-folder` to combine prebuilt configs with custom tools.
|
||||||
|
|
||||||
|
You can also combine multiple prebuilt configs.
|
||||||
|
|
||||||
See [Usage Examples](../reference/cli.md#examples).
|
See [Usage Examples](../reference/cli.md#examples).
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
@@ -194,6 +197,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
|
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
|
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
|
||||||
@@ -205,6 +209,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
|
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
## Cloud SQL for PostgreSQL
|
## Cloud SQL for PostgreSQL
|
||||||
|
|
||||||
@@ -284,6 +289,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
|
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
|
||||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||||
@@ -294,6 +300,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
|
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
## Cloud SQL for SQL Server
|
## Cloud SQL for SQL Server
|
||||||
|
|
||||||
@@ -347,6 +354,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `create_instance`
|
* `create_instance`
|
||||||
* `create_user`
|
* `create_user`
|
||||||
* `clone_instance`
|
* `clone_instance`
|
||||||
|
* `restore_backup`
|
||||||
* **Tools:**
|
* **Tools:**
|
||||||
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
|
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
|
||||||
* `get_instance`: Gets information about a Cloud SQL instance.
|
* `get_instance`: Gets information about a Cloud SQL instance.
|
||||||
@@ -357,6 +365,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
|||||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
||||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||||
|
* `restore_backup`: Restores a backup of a Cloud SQL instance.
|
||||||
|
|
||||||
## Dataplex
|
## Dataplex
|
||||||
|
|
||||||
|
|||||||
@@ -3,13 +3,14 @@ title: "EmbeddingModels"
|
|||||||
type: docs
|
type: docs
|
||||||
weight: 2
|
weight: 2
|
||||||
description: >
|
description: >
|
||||||
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
|
EmbeddingModels represent services that transform text into vector embeddings
|
||||||
|
for semantic search.
|
||||||
---
|
---
|
||||||
|
|
||||||
EmbeddingModels represent services that generate vector representations of text
|
EmbeddingModels represent services that generate vector representations of text
|
||||||
data. In the MCP Toolbox, these models enable **Semantic Queries**,
|
data. In the MCP Toolbox, these models enable **Semantic Queries**, allowing
|
||||||
allowing [Tools](../tools/) to automatically convert human-readable text into
|
[Tools](../tools/) to automatically convert human-readable text into numerical
|
||||||
numerical vectors before using them in a query.
|
vectors before using them in a query.
|
||||||
|
|
||||||
This is primarily used in two scenarios:
|
This is primarily used in two scenarios:
|
||||||
|
|
||||||
@@ -19,14 +20,33 @@ This is primarily used in two scenarios:
|
|||||||
- **Semantic Search**: Converting a natural language query into a vector to
|
- **Semantic Search**: Converting a natural language query into a vector to
|
||||||
perform similarity searches.
|
perform similarity searches.
|
||||||
|
|
||||||
|
## Hidden Parameter Duplication (valueFromParam)
|
||||||
|
|
||||||
|
When building tools for vector ingestion, you often need the same input string
|
||||||
|
twice:
|
||||||
|
|
||||||
|
1. To store the original text in a TEXT column.
|
||||||
|
1. To generate the vector embedding for a VECTOR column.
|
||||||
|
|
||||||
|
Requesting an Agent (LLM) to output the exact same string twice is inefficient
|
||||||
|
and error-prone. The `valueFromParam` field solves this by allowing a parameter
|
||||||
|
to inherit its value from another parameter in the same tool.
|
||||||
|
|
||||||
|
### Key Behaviors
|
||||||
|
|
||||||
|
1. Hidden from Manifest: Parameters with valueFromParam set are excluded from
|
||||||
|
the tool definition sent to the Agent. The Agent does not know this parameter
|
||||||
|
exists.
|
||||||
|
1. Auto-Filled: When the tool is executed, the Toolbox automatically copies the
|
||||||
|
value from the referenced parameter before processing embeddings.
|
||||||
|
|
||||||
## Example
|
## Example
|
||||||
|
|
||||||
The following configuration defines an embedding model and applies it to
|
The following configuration defines an embedding model and applies it to
|
||||||
specific tool parameters.
|
specific tool parameters.
|
||||||
|
|
||||||
{{< notice tip >}}
|
{{< notice tip >}} Use environment variable replacement with the format
|
||||||
Use environment variable replacement with the format ${ENV_NAME}
|
${ENV_NAME} instead of hardcoding your API keys into the configuration file.
|
||||||
instead of hardcoding your API keys into the configuration file.
|
|
||||||
{{< /notice >}}
|
{{< /notice >}}
|
||||||
|
|
||||||
### Step 1 - Define an Embedding Model
|
### Step 1 - Define an Embedding Model
|
||||||
@@ -35,7 +55,7 @@ Define an embedding model in the `embeddingModels` section:
|
|||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: embeddingModels
|
kind: embeddingModels
|
||||||
name: gemini-model: # Name of the embedding model
|
name: gemini-model # Name of the embedding model
|
||||||
type: gemini
|
type: gemini
|
||||||
model: gemini-embedding-001
|
model: gemini-embedding-001
|
||||||
apiKey: ${GOOGLE_API_KEY}
|
apiKey: ${GOOGLE_API_KEY}
|
||||||
@@ -45,8 +65,7 @@ dimension: 768
|
|||||||
### Step 2 - Embed Tool Parameters
|
### Step 2 - Embed Tool Parameters
|
||||||
|
|
||||||
Use the defined embedding model, embed your query parameters using the
|
Use the defined embedding model, embed your query parameters using the
|
||||||
`embeddedBy` field. Only string-typed
|
`embeddedBy` field. Only string-typed parameters can be embedded:
|
||||||
parameters can be embedded:
|
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
# Vector ingestion tool
|
# Vector ingestion tool
|
||||||
@@ -60,10 +79,13 @@ statement: |
|
|||||||
parameters:
|
parameters:
|
||||||
- name: content
|
- name: content
|
||||||
type: string
|
type: string
|
||||||
|
description: The raw text content to be stored in the database.
|
||||||
- name: vector_string
|
- name: vector_string
|
||||||
type: string
|
type: string
|
||||||
description: The text to be vectorized and stored.
|
# This parameter is hidden from the LLM.
|
||||||
embeddedBy: gemini-model # refers to the name of a defined embedding model
|
# It automatically copies the value from 'content' and embeds it.
|
||||||
|
valueFromParam: content
|
||||||
|
embeddedBy: gemini-model
|
||||||
---
|
---
|
||||||
# Semantic search tool
|
# Semantic search tool
|
||||||
kind: tools
|
kind: tools
|
||||||
|
|||||||
@@ -20,10 +20,10 @@ kind: prompts
|
|||||||
name: code_review
|
name: code_review
|
||||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||||
messages:
|
messages:
|
||||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||||
arguments:
|
arguments:
|
||||||
- name: "code"
|
- name: "code"
|
||||||
description: "The code to review"
|
description: "The code to review"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Prompt Schema
|
## Prompt Schema
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ Authentication can be handled in two ways:
|
|||||||
```yaml
|
```yaml
|
||||||
kind: sources
|
kind: sources
|
||||||
name: my-alloydb-admin
|
name: my-alloydb-admin
|
||||||
type: alloy-admin
|
type: alloydb-admin
|
||||||
---
|
---
|
||||||
kind: sources
|
kind: sources
|
||||||
name: my-oauth-alloydb-admin
|
name: my-oauth-alloydb-admin
|
||||||
|
|||||||
71
docs/en/resources/sources/cloud-logging-admin.md
Normal file
71
docs/en/resources/sources/cloud-logging-admin.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
title: "Cloud Logging Admin"
|
||||||
|
type: docs
|
||||||
|
weight: 1
|
||||||
|
description: >
|
||||||
|
The Cloud Logging Admin source enables tools to interact with the Cloud Logging API, allowing for the retrieval of log names, monitored resource types, and the querying of log data.
|
||||||
|
---
|
||||||
|
|
||||||
|
## About
|
||||||
|
|
||||||
|
The Cloud Logging Admin source provides a client to interact with the [Google
|
||||||
|
Cloud Logging API](https://cloud.google.com/logging/docs). This allows tools to list log names, monitored resource types, and query log entries.
|
||||||
|
|
||||||
|
Authentication can be handled in two ways:
|
||||||
|
|
||||||
|
1. **Application Default Credentials (ADC):** By default, the source uses ADC
|
||||||
|
to authenticate with the API.
|
||||||
|
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
|
||||||
|
expect an OAuth 2.0 access token to be provided by the client (e.g., a web
|
||||||
|
browser) for each request.
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
- [`cloud-logging-admin-list-log-names`](../tools/cloudloggingadmin/cloud-logging-admin-list-log-names.md)
|
||||||
|
Lists the log names in the project.
|
||||||
|
|
||||||
|
- [`cloud-logging-admin-list-resource-types`](../tools/cloudloggingadmin/cloud-logging-admin-list-resource-types.md)
|
||||||
|
Lists the monitored resource types.
|
||||||
|
|
||||||
|
- [`cloud-logging-admin-query-logs`](../tools/cloudloggingadmin/cloud-logging-admin-query-logs.md)
|
||||||
|
Queries log entries.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
Initialize a Cloud Logging Admin source that uses ADC:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: sources
|
||||||
|
name: my-cloud-logging
|
||||||
|
type: cloud-logging-admin
|
||||||
|
project: my-project-id
|
||||||
|
```
|
||||||
|
|
||||||
|
Initialize a Cloud Logging Admin source that uses client-side OAuth:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: sources
|
||||||
|
name: my-oauth-cloud-logging
|
||||||
|
type: cloud-logging-admin
|
||||||
|
project: my-project-id
|
||||||
|
useClientOAuth: true
|
||||||
|
```
|
||||||
|
|
||||||
|
Initialize a Cloud Logging Admin source that uses service account impersonation:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: sources
|
||||||
|
name: my-impersonated-cloud-logging
|
||||||
|
type: cloud-logging-admin
|
||||||
|
project: my-project-id
|
||||||
|
impersonateServiceAccount: "my-service-account@my-project.iam.gserviceaccount.com"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
| **field** | **type** | **required** | **description** |
|
||||||
|
|-----------------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
|
| type | string | true | Must be "cloud-logging-admin". |
|
||||||
|
| project | string | true | ID of the GCP project. |
|
||||||
|
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. Cannot be used with `impersonateServiceAccount`. |
|
||||||
|
| impersonateServiceAccount | string | false | The service account to impersonate for API calls. Cannot be used with `useClientOAuth`. |
|
||||||
@@ -7,6 +7,17 @@ description: >
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
{{< notice note >}}
|
||||||
|
**⚠️ Best Effort Maintenance**
|
||||||
|
|
||||||
|
This integration is maintained on a best-effort basis by the project
|
||||||
|
team/community. While we strive to address issues and provide workarounds when
|
||||||
|
resources are available, there are no guaranteed response times or code fixes.
|
||||||
|
|
||||||
|
The automated integration tests for this module are currently non-functional or
|
||||||
|
failing.
|
||||||
|
{{< /notice >}}
|
||||||
|
|
||||||
## About
|
## About
|
||||||
|
|
||||||
[Dgraph][dgraph-docs] is an open-source graph database. It is designed for
|
[Dgraph][dgraph-docs] is an open-source graph database. It is designed for
|
||||||
|
|||||||
@@ -12,6 +12,9 @@ aliases:
|
|||||||
|
|
||||||
The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).
|
The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Only `alloydb`, `spannerReference`, and `cloudSqlReference` are supported as [datasource references](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1beta/projects.locations.dataAgents#DatasourceReferences).
|
||||||
|
|
||||||
## Example
|
## Example
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
@@ -41,13 +44,13 @@ generationOptions:
|
|||||||
|
|
||||||
### Usage Flow
|
### Usage Flow
|
||||||
|
|
||||||
When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
|
When using this tool, a `query` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
|
||||||
|
|
||||||
The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).
|
The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).
|
||||||
|
|
||||||
See [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.
|
See [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.
|
||||||
|
|
||||||
**Example Input Prompt:**
|
**Example Input Query:**
|
||||||
|
|
||||||
```text
|
```text
|
||||||
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
|
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
|
||||||
|
|||||||
@@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
title: "cloud-logging-admin-list-log-names"
|
||||||
|
type: docs
|
||||||
|
description: >
|
||||||
|
A "cloud-logging-admin-list-log-names" tool lists the log names in the project.
|
||||||
|
aliases:
|
||||||
|
- /resources/tools/cloud-logging-admin-list-log-names
|
||||||
|
---
|
||||||
|
|
||||||
|
## About
|
||||||
|
|
||||||
|
The `cloud-logging-admin-list-log-names` tool lists the log names available in the Google Cloud project.
|
||||||
|
It's compatible with the following sources:
|
||||||
|
|
||||||
|
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: tools
|
||||||
|
name: list_log_names
|
||||||
|
type: cloud-logging-admin-list-log-names
|
||||||
|
source: my-cloud-logging
|
||||||
|
description: Lists all log names in the project.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
| **field** | **type** | **required** | **description** |
|
||||||
|
|-------------|:--------:|:------------:|----------------------------------------------------|
|
||||||
|
| type | string | true | Must be "cloud-logging-admin-list-log-names". |
|
||||||
|
| source | string | true | Name of the cloud-logging-admin source. |
|
||||||
|
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||||
|
|
||||||
|
### Parameters
|
||||||
|
|
||||||
|
| **parameter** | **type** | **required** | **description** |
|
||||||
|
|:--------------|:--------:|:------------:|:----------------|
|
||||||
|
| limit | integer | false | Maximum number of log entries to return (default: 200). |
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
---
|
||||||
|
title: "cloud-logging-admin-list-resource-types"
|
||||||
|
type: docs
|
||||||
|
description: >
|
||||||
|
A "cloud-logging-admin-list-resource-types" tool lists the monitored resource types.
|
||||||
|
aliases:
|
||||||
|
- /resources/tools/cloud-logging-admin-list-resource-types
|
||||||
|
---
|
||||||
|
|
||||||
|
## About
|
||||||
|
|
||||||
|
The `cloud-logging-admin-list-resource-types` tool lists the monitored resource types available in Google Cloud Logging.
|
||||||
|
It's compatible with the following sources:
|
||||||
|
|
||||||
|
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: tools
|
||||||
|
name: list_resource_types
|
||||||
|
type: cloud-logging-admin-list-resource-types
|
||||||
|
source: my-cloud-logging
|
||||||
|
description: Lists monitored resource types.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
| **field** | **type** | **required** | **description** |
|
||||||
|
|-------------|:--------:|:------------:|----------------------------------------------------|
|
||||||
|
| type | string | true | Must be "cloud-logging-admin-list-resource-types".|
|
||||||
|
| source | string | true | Name of the cloud-logging-admin source. |
|
||||||
|
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||||
|
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
---
|
||||||
|
title: "cloud-logging-admin-query-logs"
|
||||||
|
type: docs
|
||||||
|
description: >
|
||||||
|
A "cloud-logging-admin-query-logs" tool queries log entries.
|
||||||
|
aliases:
|
||||||
|
- /resources/tools/cloud-logging-admin-query-logs
|
||||||
|
---
|
||||||
|
|
||||||
|
## About
|
||||||
|
|
||||||
|
The `cloud-logging-admin-query-logs` tool allows you to query log entries from Google Cloud Logging using the advanced logs filter syntax.
|
||||||
|
It's compatible with the following sources:
|
||||||
|
|
||||||
|
- [cloud-logging-admin](../../sources/cloud-logging-admin.md)
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: tools
|
||||||
|
name: query_logs
|
||||||
|
type: cloud-logging-admin-query-logs
|
||||||
|
source: my-cloud-logging
|
||||||
|
description: Queries log entries from Cloud Logging.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
| **field** | **type** | **required** | **description** |
|
||||||
|
|-------------|:--------:|:------------:|----------------------------------------------------|
|
||||||
|
| type | string | true | Must be "cloud-logging-admin-query-logs". |
|
||||||
|
| source | string | true | Name of the cloud-logging-admin source. |
|
||||||
|
| description | string | true | Description of the tool that is passed to the LLM. |
|
||||||
|
|
||||||
|
### Parameters
|
||||||
|
|
||||||
|
| **parameter** | **type** | **required** | **description** |
|
||||||
|
|:--------------|:--------:|:------------:|:----------------|
|
||||||
|
| filter | string | false | Cloud Logging filter query. Common fields: resource.type, resource.labels.*, logName, severity, textPayload, jsonPayload.*, protoPayload.*, labels.*, httpRequest.*. Operators: =, !=, <, <=, >, >=, :, =~, AND, OR, NOT. |
|
||||||
|
| newestFirst | boolean | false | Set to true for newest logs first. Defaults to oldest first. |
|
||||||
|
| startTime | string | false | Start time in RFC3339 format (e.g., 2025-12-09T00:00:00Z). Defaults to 30 days ago. |
|
||||||
|
| endTime | string | false | End time in RFC3339 format (e.g., 2025-12-09T23:59:59Z). Defaults to now. |
|
||||||
|
| verbose | boolean | false | Include additional fields (insertId, trace, spanId, httpRequest, labels, operation, sourceLocation). Defaults to false. |
|
||||||
|
| limit | integer | false | Maximum number of log entries to return. Default: `200`. |
|
||||||
53
docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md
Normal file
53
docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
---
|
||||||
|
title: cloud-sql-restore-backup
|
||||||
|
type: docs
|
||||||
|
weight: 10
|
||||||
|
description: "Restores a backup of a Cloud SQL instance."
|
||||||
|
---
|
||||||
|
|
||||||
|
The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API.
|
||||||
|
|
||||||
|
{{< notice info dd>}}
|
||||||
|
This tool uses a `source` of type `cloud-sql-admin`.
|
||||||
|
{{< /notice >}}
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
Basic backup restore
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
kind: tools
|
||||||
|
name: backup-restore-basic
|
||||||
|
type: cloud-sql-restore-backup
|
||||||
|
source: cloud-sql-admin-source
|
||||||
|
description: "Restores a backup onto the given Cloud SQL instance."
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
### Tool Configuration
|
||||||
|
| **field** | **type** | **required** | **description** |
|
||||||
|
| -------------- | :------: | :----------: | ------------------------------------------------ |
|
||||||
|
| type | string | true | Must be "cloud-sql-restore-backup". |
|
||||||
|
| source | string | true | The name of the `cloud-sql-admin` source to use. |
|
||||||
|
| description | string | false | A description of the tool. |
|
||||||
|
|
||||||
|
### Tool Inputs
|
||||||
|
|
||||||
|
| **parameter** | **type** | **required** | **description** |
|
||||||
|
| ------------------| :------: | :----------: | -----------------------------------------------------------------------------|
|
||||||
|
| target_project | string | true | The project ID of the instance to restore the backup onto. |
|
||||||
|
| target_instance | string | true | The instance to restore the backup onto. Does not include the project ID. |
|
||||||
|
| backup_id | string | true | The identifier of the backup being restored. |
|
||||||
|
| source_project | string | false | (Optional) The project ID of the instance that the backup belongs to. |
|
||||||
|
| source_instance | string | false | (Optional) Cloud SQL instance ID of the instance that the backup belongs to. |
|
||||||
|
|
||||||
|
## Usage Notes
|
||||||
|
|
||||||
|
- The `backup_id` field can be a BackupRun ID (which will be an int64), backup name, or BackupDR backup name.
|
||||||
|
- If the `backup_id` field contains a BackupRun ID (i.e. an int64), the optional fields `source_project` and `source_instance` must also be provided.
|
||||||
|
|
||||||
|
## See Also
|
||||||
|
- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
|
||||||
|
- [Toolbox Cloud SQL tools documentation](../cloudsql)
|
||||||
|
- [Cloud SQL Restore API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/restoring)
|
||||||
@@ -9,6 +9,17 @@ aliases:
|
|||||||
- /resources/tools/dgraph-dql
|
- /resources/tools/dgraph-dql
|
||||||
---
|
---
|
||||||
|
|
||||||
|
{{< notice note >}}
|
||||||
|
**⚠️ Best Effort Maintenance**
|
||||||
|
|
||||||
|
This integration is maintained on a best-effort basis by the project
|
||||||
|
team/community. While we strive to address issues and provide workarounds when
|
||||||
|
resources are available, there are no guaranteed response times or code fixes.
|
||||||
|
|
||||||
|
The automated integration tests for this module are currently non-functional or
|
||||||
|
failing.
|
||||||
|
{{< /notice >}}
|
||||||
|
|
||||||
## About
|
## About
|
||||||
|
|
||||||
A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph
|
A `dgraph-dql` tool executes a pre-defined DQL statement against a Dgraph
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ with filters, ordering, and limit capabilities.
|
|||||||
To use this tool, you need to configure it in your YAML configuration file:
|
To use this tool, you need to configure it in your YAML configuration file:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
kind: source
|
kind: sources
|
||||||
name: my-firestore
|
name: my-firestore
|
||||||
type: firestore
|
type: firestore
|
||||||
project: my-gcp-project
|
project: my-gcp-project
|
||||||
|
|||||||
@@ -30,6 +30,10 @@ following config for example:
|
|||||||
- name: userNames
|
- name: userNames
|
||||||
type: array
|
type: array
|
||||||
description: The user names to be set.
|
description: The user names to be set.
|
||||||
|
items:
|
||||||
|
name: userName # the item name doesn't matter but it has to exist
|
||||||
|
type: string
|
||||||
|
description: username
|
||||||
```
|
```
|
||||||
|
|
||||||
If the input is an array of strings `["Alice", "Sid", "Bob"]`, The final command
|
If the input is an array of strings `["Alice", "Sid", "Bob"]`, The final command
|
||||||
|
|||||||
@@ -771,7 +771,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.25.0\" # x-release-please-version\n",
|
"version = \"0.26.0\" # x-release-please-version\n",
|
||||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
|
|||||||
@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
export VERSION="0.25.0"
|
export VERSION="0.26.0"
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|||||||
@@ -220,7 +220,7 @@
|
|||||||
},
|
},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"version = \"0.25.0\" # x-release-please-version\n",
|
"version = \"0.26.0\" # x-release-please-version\n",
|
||||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Make the binary executable\n",
|
"# Make the binary executable\n",
|
||||||
|
|||||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
@@ -335,7 +335,7 @@ pip install toolbox-llamaindex
|
|||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
|
|
||||||
pip install google-adk
|
pip install google-adk[toolbox]
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
|
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
@@ -375,7 +375,7 @@ pip install llama-index-llms-google-genai
|
|||||||
|
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< tab header="ADK" lang="bash" >}}
|
{{< tab header="ADK" lang="bash" >}}
|
||||||
pip install toolbox-core
|
# No other dependencies required for ADK
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
|
|
||||||
@@ -617,8 +617,8 @@ from google.adk.agents import Agent
|
|||||||
from google.adk.runners import Runner
|
from google.adk.runners import Runner
|
||||||
from google.adk.sessions import InMemorySessionService
|
from google.adk.sessions import InMemorySessionService
|
||||||
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
|
||||||
|
from google.adk.tools.toolbox_toolset import ToolboxToolset
|
||||||
from google.genai import types # For constructing message content
|
from google.genai import types # For constructing message content
|
||||||
from toolbox_core import ToolboxSyncClient
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
|
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
|
||||||
@@ -633,48 +633,47 @@ os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1'
|
|||||||
|
|
||||||
# --- Load Tools from Toolbox ---
|
# --- Load Tools from Toolbox ---
|
||||||
|
|
||||||
# TODO(developer): Ensure the Toolbox server is running at <http://127.0.0.1:5000>
|
# TODO(developer): Ensure the Toolbox server is running at http://127.0.0.1:5000
|
||||||
|
toolset = ToolboxToolset(server_url="http://127.0.0.1:5000")
|
||||||
|
|
||||||
with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
# --- Define the Agent's Prompt ---
|
||||||
# TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server.
|
prompt = """
|
||||||
agent_toolset = toolbox_client.load_toolset("my-toolset")
|
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||||
|
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||||
|
location and price tier. Always mention hotel ids while performing any
|
||||||
|
searches. This is very important for any operations. For any bookings or
|
||||||
|
cancellations, please provide the appropriate confirmation. Be sure to
|
||||||
|
update checkin or checkout dates if mentioned by the user.
|
||||||
|
Don't ask for confirmations from the user.
|
||||||
|
"""
|
||||||
|
|
||||||
# --- Define the Agent's Prompt ---
|
# --- Configure the Agent ---
|
||||||
prompt = """
|
|
||||||
You're a helpful hotel assistant. You handle hotel searching, booking and
|
|
||||||
cancellations. When the user searches for a hotel, mention it's name, id,
|
|
||||||
location and price tier. Always mention hotel ids while performing any
|
|
||||||
searches. This is very important for any operations. For any bookings or
|
|
||||||
cancellations, please provide the appropriate confirmation. Be sure to
|
|
||||||
update checkin or checkout dates if mentioned by the user.
|
|
||||||
Don't ask for confirmations from the user.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# --- Configure the Agent ---
|
root_agent = Agent(
|
||||||
|
model='gemini-2.0-flash-001',
|
||||||
|
name='hotel_agent',
|
||||||
|
description='A helpful AI assistant that can search and book hotels.',
|
||||||
|
instruction=prompt,
|
||||||
|
tools=[toolset], # Pass the loaded toolset
|
||||||
|
)
|
||||||
|
|
||||||
root_agent = Agent(
|
# --- Initialize Services for Running the Agent ---
|
||||||
model='gemini-2.0-flash-001',
|
session_service = InMemorySessionService()
|
||||||
name='hotel_agent',
|
artifacts_service = InMemoryArtifactService()
|
||||||
description='A helpful AI assistant that can search and book hotels.',
|
|
||||||
instruction=prompt,
|
|
||||||
tools=agent_toolset, # Pass the loaded toolset
|
|
||||||
)
|
|
||||||
|
|
||||||
# --- Initialize Services for Running the Agent ---
|
runner = Runner(
|
||||||
session_service = InMemorySessionService()
|
app_name='hotel_agent',
|
||||||
artifacts_service = InMemoryArtifactService()
|
agent=root_agent,
|
||||||
|
artifact_service=artifacts_service,
|
||||||
|
session_service=session_service,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def main():
|
||||||
# Create a new session for the interaction.
|
# Create a new session for the interaction.
|
||||||
session = session_service.create_session(
|
session = await session_service.create_session(
|
||||||
state={}, app_name='hotel_agent', user_id='123'
|
state={}, app_name='hotel_agent', user_id='123'
|
||||||
)
|
)
|
||||||
|
|
||||||
runner = Runner(
|
|
||||||
app_name='hotel_agent',
|
|
||||||
agent=root_agent,
|
|
||||||
artifact_service=artifacts_service,
|
|
||||||
session_service=session_service,
|
|
||||||
)
|
|
||||||
|
|
||||||
# --- Define Queries and Run the Agent ---
|
# --- Define Queries and Run the Agent ---
|
||||||
queries = [
|
queries = [
|
||||||
"Find hotels in Basel with Basel in it's name.",
|
"Find hotels in Basel with Basel in it's name.",
|
||||||
@@ -697,6 +696,10 @@ with ToolboxSyncClient("<http://127.0.0.1:5000>") as toolbox_client:
|
|||||||
|
|
||||||
for text in responses:
|
for text in responses:
|
||||||
print(text)
|
print(text)
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
if __name__ == "__main__":
|
||||||
|
asyncio.run(main())
|
||||||
{{< /tab >}}
|
{{< /tab >}}
|
||||||
{{< /tabpane >}}
|
{{< /tabpane >}}
|
||||||
|
|
||||||
|
|||||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
|||||||
<!-- {x-release-please-start-version} -->
|
<!-- {x-release-please-start-version} -->
|
||||||
```bash
|
```bash
|
||||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
curl -O https://storage.googleapis.com/genai-toolbox/v0.26.0/$OS/toolbox
|
||||||
```
|
```
|
||||||
<!-- {x-release-please-end} -->
|
<!-- {x-release-please-end} -->
|
||||||
|
|
||||||
|
|||||||
52
docs/en/samples/pre_post_processing/_index.md
Normal file
52
docs/en/samples/pre_post_processing/_index.md
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
---
|
||||||
|
title: "Pre and Post processing"
|
||||||
|
type: docs
|
||||||
|
weight: 1
|
||||||
|
description: >
|
||||||
|
Pre and Post processing in GenAI applications.
|
||||||
|
---
|
||||||
|
|
||||||
|
Pre and post processing allow developers to intercept and modify interactions between the agent and its tools or the user. This capability is essential for building robust, secure, and compliant agents.
|
||||||
|
|
||||||
|
## Types of Processing
|
||||||
|
|
||||||
|
### Pre-processing
|
||||||
|
|
||||||
|
Pre-processing occurs before a tool is executed or an agent processes a message. Key types include:
|
||||||
|
|
||||||
|
- **Input Sanitization & Redaction**: Detecting and masking sensitive information (like PII) in user queries or tool arguments to prevent it from being logged or sent to unauthorized systems.
|
||||||
|
- **Business Logic Validation**: Verifying that the proposed action complies with business rules (e.g., ensuring a requested hotel stay does not exceed 14 days, or checking if a user has sufficient permission).
|
||||||
|
- **Security Guardrails**: Analyzing inputs for potential prompt injection attacks or malicious payloads.
|
||||||
|
|
||||||
|
### Post-processing
|
||||||
|
|
||||||
|
Post-processing occurs after a tool has executed or the model has generated a response. Key types include:
|
||||||
|
|
||||||
|
- **Response Enrichment**: Injecting additional data into the tool output that wasn't part of the raw API response (e.g., calculating loyalty points earned based on the booking value).
|
||||||
|
- **Output Formatting**: Transforming raw data (like JSON or XML) into a more human-readable or model-friendly format to improve the agent's understanding.
|
||||||
|
- **Compliance Auditing**: Logging the final outcome of transactions, including the original request and the result, to a secure audit trail.
|
||||||
|
|
||||||
|
## Processing Scopes
|
||||||
|
|
||||||
|
Processing logic can be applied at different levels of the application:
|
||||||
|
|
||||||
|
### Tool Level
|
||||||
|
|
||||||
|
Wraps individual tool executions. This is best for logic specific to a single tool or a set of tools.
|
||||||
|
|
||||||
|
- **Scope**: Intercepts the raw inputs (arguments) to a tool and its outputs.
|
||||||
|
- **Use Cases**: Argument validation, output formatting, specific privacy rules for sensitive tools.
|
||||||
|
|
||||||
|
### Model Level
|
||||||
|
|
||||||
|
Intercepts individual calls to the Large Language Model (LLM).
|
||||||
|
|
||||||
|
- **Scope**: Intercepts the list of messages (prompt) sent to the model and the generation (response) received.
|
||||||
|
- **Use Cases**: Global PII redaction (across all tools/chat), prompt engineering/injection, token usage tracking, and hallucination detection.
|
||||||
|
|
||||||
|
### Agent Level
|
||||||
|
|
||||||
|
Wraps the high-level agent execution loop (e.g., a "turn" in the conversation).
|
||||||
|
|
||||||
|
- **Scope**: Intercepts the initial user input and the final agent response, enveloping one or more model calls and tool executions.
|
||||||
|
- **Use Cases**: User authentication, rate limiting, session management, and end-to-end audit logging.
|
||||||
5
docs/en/samples/pre_post_processing/golden.txt
Normal file
5
docs/en/samples/pre_post_processing/golden.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
Final Client Response:
|
||||||
|
AI:
|
||||||
|
Booking Confirmed!
|
||||||
|
Loyalty Points
|
||||||
|
POLICY CHECK: Intercepting 'book-hotel'
|
||||||
31
docs/en/samples/pre_post_processing/python.md
Normal file
31
docs/en/samples/pre_post_processing/python.md
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
---
|
||||||
|
title: "(Python) Pre and post processing"
|
||||||
|
type: docs
|
||||||
|
weight: 4
|
||||||
|
description: >
|
||||||
|
How to add pre and post processing to your Python toolbox applications.
|
||||||
|
---
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
This tutorial assumes that you have set up a basic toolbox application as described in the [local quickstart](../../getting-started/local_quickstart).
|
||||||
|
|
||||||
|
This guide demonstrates how to implement these patterns in your Toolbox applications.
|
||||||
|
|
||||||
|
## Python
|
||||||
|
|
||||||
|
{{< tabpane persist=header >}}
|
||||||
|
{{% tab header="ADK" text=true %}}
|
||||||
|
Coming soon.
|
||||||
|
{{% /tab %}}
|
||||||
|
{{% tab header="Langchain" text=true %}}
|
||||||
|
The following example demonstrates how to use `ToolboxClient` with LangChain's middleware to implement pre and post processing for tool calls.
|
||||||
|
|
||||||
|
```py
|
||||||
|
{{< include "python/langchain/agent.py" >}}
|
||||||
|
```
|
||||||
|
|
||||||
|
For more information, see the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks).
|
||||||
|
You can also add model-level (`wrap_model`) and agent-level (`before_agent`, `after_agent`) hooks to intercept messages at different stages of the execution loop. See the [LangChain Middleware documentation](https://docs.langchain.com/oss/python/langchain/middleware/custom#wrap-style-hooks) for details on these additional hook types.
|
||||||
|
{{% /tab %}}
|
||||||
|
{{< /tabpane >}}
|
||||||
4
docs/en/samples/pre_post_processing/python/__init__.py
Normal file
4
docs/en/samples/pre_post_processing/python/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# This file makes the 'pre_post_processing/python' directory a Python package.
|
||||||
|
|
||||||
|
# You can include any package-level initialization logic here if needed.
|
||||||
|
# For now, this file is empty.
|
||||||
58
docs/en/samples/pre_post_processing/python/agent_test.py
Normal file
58
docs/en/samples/pre_post_processing/python/agent_test.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
# Copyright 2026 Google LLC
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import importlib
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
ORCH_NAME = os.environ.get("ORCH_NAME")
|
||||||
|
module_path = f"python.{ORCH_NAME}.agent"
|
||||||
|
agent = importlib.import_module(module_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def golden_keywords():
|
||||||
|
"""Loads expected keywords from the golden.txt file."""
|
||||||
|
golden_file_path = Path(__file__).resolve().parent.parent / "golden.txt"
|
||||||
|
if not golden_file_path.exists():
|
||||||
|
pytest.fail(f"Golden file not found: {golden_file_path}")
|
||||||
|
try:
|
||||||
|
with open(golden_file_path, "r") as f:
|
||||||
|
return [line.strip() for line in f.readlines() if line.strip()]
|
||||||
|
except Exception as e:
|
||||||
|
pytest.fail(f"Could not read golden.txt: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# --- Execution Tests ---
|
||||||
|
class TestExecution:
|
||||||
|
"""Test framework execution and output validation."""
|
||||||
|
|
||||||
|
@pytest.fixture(scope="function")
|
||||||
|
def script_output(self, capsys):
|
||||||
|
"""Run the agent function and return its output."""
|
||||||
|
asyncio.run(agent.main())
|
||||||
|
return capsys.readouterr()
|
||||||
|
|
||||||
|
def test_script_runs_without_errors(self, script_output):
|
||||||
|
"""Test that the script runs and produces no stderr."""
|
||||||
|
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
|
||||||
|
|
||||||
|
def test_keywords_in_output(self, script_output, golden_keywords):
|
||||||
|
"""Test that expected keywords are present in the script's output."""
|
||||||
|
output = script_output.out
|
||||||
|
missing_keywords = [kw for kw in golden_keywords if kw not in output]
|
||||||
|
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"
|
||||||
111
docs/en/samples/pre_post_processing/python/langchain/agent.py
Normal file
111
docs/en/samples/pre_post_processing/python/langchain/agent.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# Copyright 2026 Google LLC
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from langchain.agents import create_agent
|
||||||
|
from langchain.agents.middleware import wrap_tool_call
|
||||||
|
from langchain_core.messages import ToolMessage
|
||||||
|
from langchain_google_vertexai import ChatVertexAI
|
||||||
|
from toolbox_langchain import ToolboxClient
|
||||||
|
|
||||||
|
system_prompt = """
|
||||||
|
You're a helpful hotel assistant. You handle hotel searching, booking and
|
||||||
|
cancellations. When the user searches for a hotel, mention it's name, id,
|
||||||
|
location and price tier. Always mention hotel ids while performing any
|
||||||
|
searches. This is very important for any operations. For any bookings or
|
||||||
|
cancellations, please provide the appropriate confirmation. Be sure to
|
||||||
|
update checkin or checkout dates if mentioned by the user.
|
||||||
|
Don't ask for confirmations from the user.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# Pre processing
|
||||||
|
@wrap_tool_call
|
||||||
|
async def enforce_business_rules(request, handler):
|
||||||
|
"""
|
||||||
|
Business Logic Validation:
|
||||||
|
Enforces max stay duration (e.g., max 14 days).
|
||||||
|
"""
|
||||||
|
tool_call = request.tool_call
|
||||||
|
name = tool_call["name"]
|
||||||
|
args = tool_call["args"]
|
||||||
|
|
||||||
|
print(f"POLICY CHECK: Intercepting '{name}'")
|
||||||
|
|
||||||
|
if name == "update-hotel":
|
||||||
|
if "checkin_date" in args and "checkout_date" in args:
|
||||||
|
try:
|
||||||
|
start = datetime.fromisoformat(args["checkin_date"])
|
||||||
|
end = datetime.fromisoformat(args["checkout_date"])
|
||||||
|
duration = (end - start).days
|
||||||
|
|
||||||
|
if duration > 14:
|
||||||
|
print("BLOCKED: Stay too long")
|
||||||
|
return ToolMessage(
|
||||||
|
content="Error: Maximum stay duration is 14 days.",
|
||||||
|
tool_call_id=tool_call["id"],
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
pass # Ignore invalid date formats
|
||||||
|
|
||||||
|
return await handler(request)
|
||||||
|
|
||||||
|
|
||||||
|
# Post processing
|
||||||
|
@wrap_tool_call
|
||||||
|
async def enrich_response(request, handler):
|
||||||
|
"""
|
||||||
|
Post-Processing & Enrichment:
|
||||||
|
Adds loyalty points information to successful bookings.
|
||||||
|
Standardizes output format.
|
||||||
|
"""
|
||||||
|
result = await handler(request)
|
||||||
|
|
||||||
|
if isinstance(result, ToolMessage):
|
||||||
|
content = str(result.content)
|
||||||
|
tool_name = request.tool_call["name"]
|
||||||
|
|
||||||
|
if tool_name == "book-hotel" and "Error" not in content:
|
||||||
|
loyalty_bonus = 500
|
||||||
|
result.content = f"Booking Confirmed! \n You earned {loyalty_bonus} Loyalty Points with this stay.\n\nSystem Details: {content}"
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
async with ToolboxClient("http://127.0.0.1:5000") as client:
|
||||||
|
tools = await client.aload_toolset("my-toolset")
|
||||||
|
model = ChatVertexAI(model="gemini-2.5-flash")
|
||||||
|
agent = create_agent(
|
||||||
|
system_prompt=system_prompt,
|
||||||
|
model=model,
|
||||||
|
tools=tools,
|
||||||
|
middleware=[enforce_business_rules, enrich_response],
|
||||||
|
)
|
||||||
|
|
||||||
|
user_input = "Book hotel with id 3."
|
||||||
|
response = await agent.ainvoke(
|
||||||
|
{"messages": [{"role": "user", "content": user_input}]}
|
||||||
|
)
|
||||||
|
|
||||||
|
print("-" * 50)
|
||||||
|
print("Final Client Response:")
|
||||||
|
last_ai_msg = response["messages"][-1].content
|
||||||
|
print(f"AI: {last_ai_msg}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
asyncio.run(main())
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
langchain==1.2.6
|
||||||
|
toolbox-langchain==0.5.7
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "mcp-toolbox-for-databases",
|
"name": "mcp-toolbox-for-databases",
|
||||||
"version": "0.25.0",
|
"version": "0.26.0",
|
||||||
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
"description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
|
||||||
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
"contextFileName": "MCP-TOOLBOX-EXTENSION.md"
|
||||||
}
|
}
|
||||||
5
go.mod
5
go.mod
@@ -13,6 +13,7 @@ require (
|
|||||||
cloud.google.com/go/dataproc/v2 v2.15.0
|
cloud.google.com/go/dataproc/v2 v2.15.0
|
||||||
cloud.google.com/go/firestore v1.20.0
|
cloud.google.com/go/firestore v1.20.0
|
||||||
cloud.google.com/go/geminidataanalytics v0.3.0
|
cloud.google.com/go/geminidataanalytics v0.3.0
|
||||||
|
cloud.google.com/go/logging v1.13.1
|
||||||
cloud.google.com/go/longrunning v0.7.0
|
cloud.google.com/go/longrunning v0.7.0
|
||||||
cloud.google.com/go/spanner v1.86.1
|
cloud.google.com/go/spanner v1.86.1
|
||||||
github.com/ClickHouse/clickhouse-go/v2 v2.40.3
|
github.com/ClickHouse/clickhouse-go/v2 v2.40.3
|
||||||
@@ -38,7 +39,7 @@ require (
|
|||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/jackc/pgx/v5 v5.7.6
|
github.com/jackc/pgx/v5 v5.7.6
|
||||||
github.com/jmoiron/sqlx v1.4.0
|
github.com/jmoiron/sqlx v1.4.0
|
||||||
github.com/looker-open-source/sdk-codegen/go v0.25.21
|
github.com/looker-open-source/sdk-codegen/go v0.25.22
|
||||||
github.com/microsoft/go-mssqldb v1.9.3
|
github.com/microsoft/go-mssqldb v1.9.3
|
||||||
github.com/nakagami/firebirdsql v0.9.15
|
github.com/nakagami/firebirdsql v0.9.15
|
||||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
||||||
@@ -63,6 +64,7 @@ require (
|
|||||||
google.golang.org/api v0.256.0
|
google.golang.org/api v0.256.0
|
||||||
google.golang.org/genai v1.37.0
|
google.golang.org/genai v1.37.0
|
||||||
google.golang.org/genproto v0.0.0-20251022142026-3a174f9686a8
|
google.golang.org/genproto v0.0.0-20251022142026-3a174f9686a8
|
||||||
|
google.golang.org/grpc v1.76.0
|
||||||
google.golang.org/protobuf v1.36.10
|
google.golang.org/protobuf v1.36.10
|
||||||
modernc.org/sqlite v1.40.0
|
modernc.org/sqlite v1.40.0
|
||||||
)
|
)
|
||||||
@@ -229,7 +231,6 @@ require (
|
|||||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20251111163417-95abcf5c77ba // indirect
|
google.golang.org/genproto/googleapis/api v0.0.0-20251111163417-95abcf5c77ba // indirect
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect
|
||||||
google.golang.org/grpc v1.76.0 // indirect
|
|
||||||
gopkg.in/inf.v0 v0.9.1 // indirect
|
gopkg.in/inf.v0 v0.9.1 // indirect
|
||||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||||
modernc.org/libc v1.66.10 // indirect
|
modernc.org/libc v1.66.10 // indirect
|
||||||
|
|||||||
8
go.sum
8
go.sum
@@ -370,8 +370,8 @@ cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6
|
|||||||
cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo=
|
cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo=
|
||||||
cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw=
|
cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw=
|
||||||
cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M=
|
cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M=
|
||||||
cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc=
|
cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY=
|
||||||
cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA=
|
cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw=
|
||||||
cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE=
|
cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE=
|
||||||
cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc=
|
cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc=
|
||||||
cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo=
|
cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo=
|
||||||
@@ -1172,8 +1172,8 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
|||||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
|
github.com/looker-open-source/sdk-codegen/go v0.25.22 h1:DGYt1v2R2uE/m71sWAvgxsJnDLM9B7C40N5/CTDlE2A=
|
||||||
github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
|
github.com/looker-open-source/sdk-codegen/go v0.25.22/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
|
||||||
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||||
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||||
github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
|
github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
|
||||||
|
|||||||
@@ -46,6 +46,9 @@ tools:
|
|||||||
create_backup:
|
create_backup:
|
||||||
kind: cloud-sql-create-backup
|
kind: cloud-sql-create-backup
|
||||||
source: cloud-sql-admin-source
|
source: cloud-sql-admin-source
|
||||||
|
restore_backup:
|
||||||
|
kind: cloud-sql-restore-backup
|
||||||
|
source: cloud-sql-admin-source
|
||||||
|
|
||||||
toolsets:
|
toolsets:
|
||||||
cloud_sql_mssql_admin_tools:
|
cloud_sql_mssql_admin_tools:
|
||||||
@@ -58,3 +61,4 @@ toolsets:
|
|||||||
- wait_for_operation
|
- wait_for_operation
|
||||||
- clone_instance
|
- clone_instance
|
||||||
- create_backup
|
- create_backup
|
||||||
|
- restore_backup
|
||||||
|
|||||||
@@ -46,6 +46,9 @@ tools:
|
|||||||
create_backup:
|
create_backup:
|
||||||
kind: cloud-sql-create-backup
|
kind: cloud-sql-create-backup
|
||||||
source: cloud-sql-admin-source
|
source: cloud-sql-admin-source
|
||||||
|
restore_backup:
|
||||||
|
kind: cloud-sql-restore-backup
|
||||||
|
source: cloud-sql-admin-source
|
||||||
|
|
||||||
toolsets:
|
toolsets:
|
||||||
cloud_sql_mysql_admin_tools:
|
cloud_sql_mysql_admin_tools:
|
||||||
@@ -58,3 +61,4 @@ toolsets:
|
|||||||
- wait_for_operation
|
- wait_for_operation
|
||||||
- clone_instance
|
- clone_instance
|
||||||
- create_backup
|
- create_backup
|
||||||
|
- restore_backup
|
||||||
|
|||||||
@@ -49,6 +49,9 @@ tools:
|
|||||||
create_backup:
|
create_backup:
|
||||||
kind: cloud-sql-create-backup
|
kind: cloud-sql-create-backup
|
||||||
source: cloud-sql-admin-source
|
source: cloud-sql-admin-source
|
||||||
|
restore_backup:
|
||||||
|
kind: cloud-sql-restore-backup
|
||||||
|
source: cloud-sql-admin-source
|
||||||
|
|
||||||
toolsets:
|
toolsets:
|
||||||
cloud_sql_postgres_admin_tools:
|
cloud_sql_postgres_admin_tools:
|
||||||
@@ -62,3 +65,4 @@ toolsets:
|
|||||||
- postgres_upgrade_precheck
|
- postgres_upgrade_precheck
|
||||||
- clone_instance
|
- clone_instance
|
||||||
- create_backup
|
- create_backup
|
||||||
|
- restore_backup
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ import (
|
|||||||
"github.com/go-chi/render"
|
"github.com/go-chi/render"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||||
"go.opentelemetry.io/otel/attribute"
|
"go.opentelemetry.io/otel/attribute"
|
||||||
"go.opentelemetry.io/otel/codes"
|
"go.opentelemetry.io/otel/codes"
|
||||||
"go.opentelemetry.io/otel/metric"
|
"go.opentelemetry.io/otel/metric"
|
||||||
@@ -231,7 +232,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// If auth error, return 401
|
// If auth error, return 401
|
||||||
if errors.Is(err, util.ErrUnauthorized) {
|
if errors.Is(err, util.ErrUnauthorized) {
|
||||||
|
|||||||
@@ -87,6 +87,10 @@ func (t MockTool) RequiresClientAuthorization(tools.SourceProvider) (bool, error
|
|||||||
return t.requiresClientAuthrorization, nil
|
return t.requiresClientAuthrorization, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t MockTool) GetParameters() parameters.Parameters {
|
||||||
|
return t.Params
|
||||||
|
}
|
||||||
|
|
||||||
func (t MockTool) McpManifest() tools.McpManifest {
|
func (t MockTool) McpManifest() tools.McpManifest {
|
||||||
properties := make(map[string]parameters.ParameterMcpManifest)
|
properties := make(map[string]parameters.ParameterMcpManifest)
|
||||||
required := make([]string, 0)
|
required := make([]string, 0)
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
yaml "github.com/goccy/go-yaml"
|
yaml "github.com/goccy/go-yaml"
|
||||||
@@ -66,12 +67,14 @@ type ServerConfig struct {
|
|||||||
Stdio bool
|
Stdio bool
|
||||||
// DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
|
// DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
|
||||||
DisableReload bool
|
DisableReload bool
|
||||||
// UI indicates if Toolbox UI endpoints (/ui) are available
|
// UI indicates if Toolbox UI endpoints (/ui) are available.
|
||||||
UI bool
|
UI bool
|
||||||
// Specifies a list of origins permitted to access this server.
|
// Specifies a list of origins permitted to access this server.
|
||||||
AllowedOrigins []string
|
AllowedOrigins []string
|
||||||
// Specifies a list of hosts permitted to access this server
|
// Specifies a list of hosts permitted to access this server.
|
||||||
AllowedHosts []string
|
AllowedHosts []string
|
||||||
|
// UserAgentMetadata specifies additional metadata to append to the User-Agent string.
|
||||||
|
UserAgentMetadata []string
|
||||||
}
|
}
|
||||||
|
|
||||||
type logFormat string
|
type logFormat string
|
||||||
@@ -136,12 +139,12 @@ type PromptsetConfigs map[string]prompts.PromptsetConfig
|
|||||||
|
|
||||||
func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, AuthServiceConfigs, EmbeddingModelConfigs, ToolConfigs, ToolsetConfigs, PromptConfigs, error) {
|
func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, AuthServiceConfigs, EmbeddingModelConfigs, ToolConfigs, ToolsetConfigs, PromptConfigs, error) {
|
||||||
// prepare configs map
|
// prepare configs map
|
||||||
sourceConfigs := make(map[string]sources.SourceConfig)
|
var sourceConfigs SourceConfigs
|
||||||
authServiceConfigs := make(AuthServiceConfigs)
|
var authServiceConfigs AuthServiceConfigs
|
||||||
embeddingModelConfigs := make(EmbeddingModelConfigs)
|
var embeddingModelConfigs EmbeddingModelConfigs
|
||||||
toolConfigs := make(ToolConfigs)
|
var toolConfigs ToolConfigs
|
||||||
toolsetConfigs := make(ToolsetConfigs)
|
var toolsetConfigs ToolsetConfigs
|
||||||
promptConfigs := make(PromptConfigs)
|
var promptConfigs PromptConfigs
|
||||||
// promptset configs is not yet supported
|
// promptset configs is not yet supported
|
||||||
|
|
||||||
decoder := yaml.NewDecoder(bytes.NewReader(raw))
|
decoder := yaml.NewDecoder(bytes.NewReader(raw))
|
||||||
@@ -157,7 +160,7 @@ func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, Au
|
|||||||
var kind, name string
|
var kind, name string
|
||||||
var ok bool
|
var ok bool
|
||||||
if kind, ok = resource["kind"].(string); !ok {
|
if kind, ok = resource["kind"].(string); !ok {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'kind' field or it is not a string")
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'kind' field or it is not a string: %v", resource)
|
||||||
}
|
}
|
||||||
if name, ok = resource["name"].(string); !ok {
|
if name, ok = resource["name"].(string); !ok {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'name' field or it is not a string")
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("missing 'name' field or it is not a string")
|
||||||
@@ -171,36 +174,54 @@ func UnmarshalResourceConfig(ctx context.Context, raw []byte) (SourceConfigs, Au
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if sourceConfigs == nil {
|
||||||
|
sourceConfigs = make(SourceConfigs)
|
||||||
|
}
|
||||||
sourceConfigs[name] = c
|
sourceConfigs[name] = c
|
||||||
case "authServices":
|
case "authServices":
|
||||||
c, err := UnmarshalYAMLAuthServiceConfig(ctx, name, resource)
|
c, err := UnmarshalYAMLAuthServiceConfig(ctx, name, resource)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if authServiceConfigs == nil {
|
||||||
|
authServiceConfigs = make(AuthServiceConfigs)
|
||||||
|
}
|
||||||
authServiceConfigs[name] = c
|
authServiceConfigs[name] = c
|
||||||
case "tools":
|
case "tools":
|
||||||
c, err := UnmarshalYAMLToolConfig(ctx, name, resource)
|
c, err := UnmarshalYAMLToolConfig(ctx, name, resource)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if toolConfigs == nil {
|
||||||
|
toolConfigs = make(ToolConfigs)
|
||||||
|
}
|
||||||
toolConfigs[name] = c
|
toolConfigs[name] = c
|
||||||
case "toolsets":
|
case "toolsets":
|
||||||
c, err := UnmarshalYAMLToolsetConfig(ctx, name, resource)
|
c, err := UnmarshalYAMLToolsetConfig(ctx, name, resource)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if toolsetConfigs == nil {
|
||||||
|
toolsetConfigs = make(ToolsetConfigs)
|
||||||
|
}
|
||||||
toolsetConfigs[name] = c
|
toolsetConfigs[name] = c
|
||||||
case "embeddingModels":
|
case "embeddingModels":
|
||||||
c, err := UnmarshalYAMLEmbeddingModelConfig(ctx, name, resource)
|
c, err := UnmarshalYAMLEmbeddingModelConfig(ctx, name, resource)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if embeddingModelConfigs == nil {
|
||||||
|
embeddingModelConfigs = make(EmbeddingModelConfigs)
|
||||||
|
}
|
||||||
embeddingModelConfigs[name] = c
|
embeddingModelConfigs[name] = c
|
||||||
case "prompts":
|
case "prompts":
|
||||||
c, err := UnmarshalYAMLPromptConfig(ctx, name, resource)
|
c, err := UnmarshalYAMLPromptConfig(ctx, name, resource)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("error unmarshaling %s: %s", kind, err)
|
||||||
}
|
}
|
||||||
|
if promptConfigs == nil {
|
||||||
|
promptConfigs = make(PromptConfigs)
|
||||||
|
}
|
||||||
promptConfigs[name] = c
|
promptConfigs[name] = c
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, nil, nil, nil, fmt.Errorf("invalid kind %s", kind)
|
return nil, nil, nil, nil, nil, nil, fmt.Errorf("invalid kind %s", kind)
|
||||||
@@ -276,6 +297,45 @@ func UnmarshalYAMLToolConfig(ctx context.Context, name string, r map[string]any)
|
|||||||
if r["authRequired"] == nil {
|
if r["authRequired"] == nil {
|
||||||
r["authRequired"] = []string{}
|
r["authRequired"] = []string{}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// validify parameter references
|
||||||
|
if rawParams, ok := r["parameters"]; ok {
|
||||||
|
if paramsList, ok := rawParams.([]any); ok {
|
||||||
|
// Turn params into a map
|
||||||
|
validParamNames := make(map[string]bool)
|
||||||
|
for _, rawP := range paramsList {
|
||||||
|
if pMap, ok := rawP.(map[string]any); ok {
|
||||||
|
if pName, ok := pMap["name"].(string); ok && pName != "" {
|
||||||
|
validParamNames[pName] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate references
|
||||||
|
for i, rawP := range paramsList {
|
||||||
|
pMap, ok := rawP.(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
pName, _ := pMap["name"].(string)
|
||||||
|
refName, _ := pMap["valueFromParam"].(string)
|
||||||
|
|
||||||
|
if refName != "" {
|
||||||
|
// Check if the referenced parameter exists
|
||||||
|
if !validParamNames[refName] {
|
||||||
|
return nil, fmt.Errorf("tool %q config error: parameter %q (index %d) references '%q' in the 'valueFromParam' field, which is not a defined parameter", name, pName, i, refName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for self-reference
|
||||||
|
if refName == pName {
|
||||||
|
return nil, fmt.Errorf("tool %q config error: parameter %q cannot copy value from itself", name, pName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
dec, err := util.NewStrictDecoder(r)
|
dec, err := util.NewStrictDecoder(r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error creating decoder: %s", err)
|
return nil, fmt.Errorf("error creating decoder: %s", err)
|
||||||
@@ -289,7 +349,11 @@ func UnmarshalYAMLToolConfig(ctx context.Context, name string, r map[string]any)
|
|||||||
|
|
||||||
func UnmarshalYAMLToolsetConfig(ctx context.Context, name string, r map[string]any) (tools.ToolsetConfig, error) {
|
func UnmarshalYAMLToolsetConfig(ctx context.Context, name string, r map[string]any) (tools.ToolsetConfig, error) {
|
||||||
var toolsetConfig tools.ToolsetConfig
|
var toolsetConfig tools.ToolsetConfig
|
||||||
justTools := map[string]any{"tools": r["tools"]}
|
toolList, ok := r["tools"].([]any)
|
||||||
|
if !ok {
|
||||||
|
return toolsetConfig, fmt.Errorf("tools is missing or not a list of strings: %v", r)
|
||||||
|
}
|
||||||
|
justTools := map[string]any{"tools": toolList}
|
||||||
dec, err := util.NewStrictDecoder(justTools)
|
dec, err := util.NewStrictDecoder(justTools)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return toolsetConfig, fmt.Errorf("error creating decoder: %s", err)
|
return toolsetConfig, fmt.Errorf("error creating decoder: %s", err)
|
||||||
@@ -324,3 +388,23 @@ func UnmarshalYAMLPromptConfig(ctx context.Context, name string, r map[string]an
|
|||||||
}
|
}
|
||||||
return promptCfg, nil
|
return promptCfg, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Tools naming validation is added in the MCP v2025-11-25, but we'll be
|
||||||
|
// implementing it across Toolbox
|
||||||
|
// Tool names SHOULD be between 1 and 128 characters in length (inclusive).
|
||||||
|
// Tool names SHOULD be considered case-sensitive.
|
||||||
|
// The following SHOULD be the only allowed characters: uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.)
|
||||||
|
// Tool names SHOULD NOT contain spaces, commas, or other special characters.
|
||||||
|
// Tool names SHOULD be unique within a server.
|
||||||
|
func NameValidation(name string) error {
|
||||||
|
strLen := len(name)
|
||||||
|
if strLen < 1 || strLen > 128 {
|
||||||
|
return fmt.Errorf("resource name SHOULD be between 1 and 128 characters in length (inclusive)")
|
||||||
|
}
|
||||||
|
validChars := regexp.MustCompile("^[a-zA-Z0-9_.-]+$")
|
||||||
|
isValid := validChars.MatchString(name)
|
||||||
|
if !isValid {
|
||||||
|
return fmt.Errorf("invalid character for resource name; only uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.) is allowed")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -27,19 +27,21 @@ import (
|
|||||||
v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
|
v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
|
||||||
v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
|
v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
|
||||||
v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
|
v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
|
||||||
|
v20251125 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20251125"
|
||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
)
|
)
|
||||||
|
|
||||||
// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
|
// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
|
||||||
// Update the version used in InitializeResponse when this value is updated.
|
// Update the version used in InitializeResponse when this value is updated.
|
||||||
const LATEST_PROTOCOL_VERSION = v20250618.PROTOCOL_VERSION
|
const LATEST_PROTOCOL_VERSION = v20251125.PROTOCOL_VERSION
|
||||||
|
|
||||||
// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
|
// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
|
||||||
var SUPPORTED_PROTOCOL_VERSIONS = []string{
|
var SUPPORTED_PROTOCOL_VERSIONS = []string{
|
||||||
v20241105.PROTOCOL_VERSION,
|
v20241105.PROTOCOL_VERSION,
|
||||||
v20250326.PROTOCOL_VERSION,
|
v20250326.PROTOCOL_VERSION,
|
||||||
v20250618.PROTOCOL_VERSION,
|
v20250618.PROTOCOL_VERSION,
|
||||||
|
v20251125.PROTOCOL_VERSION,
|
||||||
}
|
}
|
||||||
|
|
||||||
// InitializeResponse runs capability negotiation and protocol version agreement.
|
// InitializeResponse runs capability negotiation and protocol version agreement.
|
||||||
@@ -102,6 +104,8 @@ func NotificationHandler(ctx context.Context, body []byte) error {
|
|||||||
// This is the Operation phase of the lifecycle for MCP client-server connections.
|
// This is the Operation phase of the lifecycle for MCP client-server connections.
|
||||||
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||||
switch mcpVersion {
|
switch mcpVersion {
|
||||||
|
case v20251125.PROTOCOL_VERSION:
|
||||||
|
return v20251125.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
|
||||||
case v20250618.PROTOCOL_VERSION:
|
case v20250618.PROTOCOL_VERSION:
|
||||||
return v20250618.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
|
return v20250618.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
|
||||||
case v20250326.PROTOCOL_VERSION:
|
case v20250326.PROTOCOL_VERSION:
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ import (
|
|||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ProcessMethod returns a response for the request.
|
// ProcessMethod returns a response for the request.
|
||||||
@@ -176,13 +177,20 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
|||||||
}
|
}
|
||||||
logger.DebugContext(ctx, "tool invocation authorized")
|
logger.DebugContext(ctx, "tool invocation authorized")
|
||||||
|
|
||||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
}
|
}
|
||||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||||
|
|
||||||
|
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||||
|
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
// run tool invocation and generate response.
|
// run tool invocation and generate response.
|
||||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ import (
|
|||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ProcessMethod returns a response for the request.
|
// ProcessMethod returns a response for the request.
|
||||||
@@ -176,13 +177,20 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
|||||||
}
|
}
|
||||||
logger.DebugContext(ctx, "tool invocation authorized")
|
logger.DebugContext(ctx, "tool invocation authorized")
|
||||||
|
|
||||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
}
|
}
|
||||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||||
|
|
||||||
|
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||||
|
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
// run tool invocation and generate response.
|
// run tool invocation and generate response.
|
||||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ import (
|
|||||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ProcessMethod returns a response for the request.
|
// ProcessMethod returns a response for the request.
|
||||||
@@ -169,13 +170,20 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
|||||||
}
|
}
|
||||||
logger.DebugContext(ctx, "tool invocation authorized")
|
logger.DebugContext(ctx, "tool invocation authorized")
|
||||||
|
|
||||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
}
|
}
|
||||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||||
|
|
||||||
|
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||||
|
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
// run tool invocation and generate response.
|
// run tool invocation and generate response.
|
||||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
334
internal/server/mcp/v20251125/method.go
Normal file
334
internal/server/mcp/v20251125/method.go
Normal file
@@ -0,0 +1,334 @@
|
|||||||
|
// Copyright 2026 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package v20251125
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ProcessMethod returns a response for the request.
|
||||||
|
func ProcessMethod(ctx context.Context, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||||
|
switch method {
|
||||||
|
case PING:
|
||||||
|
return pingHandler(id)
|
||||||
|
case TOOLS_LIST:
|
||||||
|
return toolsListHandler(id, toolset, body)
|
||||||
|
case TOOLS_CALL:
|
||||||
|
return toolsCallHandler(ctx, id, resourceMgr, body, header)
|
||||||
|
case PROMPTS_LIST:
|
||||||
|
return promptsListHandler(ctx, id, promptset, body)
|
||||||
|
case PROMPTS_GET:
|
||||||
|
return promptsGetHandler(ctx, id, resourceMgr, body)
|
||||||
|
default:
|
||||||
|
err := fmt.Errorf("invalid method %s", method)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.METHOD_NOT_FOUND, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// pingHandler handles the "ping" method by returning an empty response.
|
||||||
|
func pingHandler(id jsonrpc.RequestId) (any, error) {
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: struct{}{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func toolsListHandler(id jsonrpc.RequestId, toolset tools.Toolset, body []byte) (any, error) {
|
||||||
|
var req ListToolsRequest
|
||||||
|
if err := json.Unmarshal(body, &req); err != nil {
|
||||||
|
err = fmt.Errorf("invalid mcp tools list request: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
result := ListToolsResult{
|
||||||
|
Tools: toolset.McpManifest,
|
||||||
|
}
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: result,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// toolsCallHandler generate a response for tools call.
|
||||||
|
func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||||
|
authServices := resourceMgr.GetAuthServiceMap()
|
||||||
|
|
||||||
|
// retrieve logger from context
|
||||||
|
logger, err := util.LoggerFromContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
var req CallToolRequest
|
||||||
|
if err = json.Unmarshal(body, &req); err != nil {
|
||||||
|
err = fmt.Errorf("invalid mcp tools call request: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
toolName := req.Params.Name
|
||||||
|
toolArgument := req.Params.Arguments
|
||||||
|
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||||
|
tool, ok := resourceMgr.GetTool(toolName)
|
||||||
|
if !ok {
|
||||||
|
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get access token
|
||||||
|
authTokenHeadername, err := tool.GetAuthTokenHeaderName(resourceMgr)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("error during invocation: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
|
||||||
|
}
|
||||||
|
accessToken := tools.AccessToken(header.Get(authTokenHeadername))
|
||||||
|
|
||||||
|
// Check if this specific tool requires the standard authorization header
|
||||||
|
clientAuth, err := tool.RequiresClientAuthorization(resourceMgr)
|
||||||
|
if err != nil {
|
||||||
|
errMsg := fmt.Errorf("error during invocation: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
|
||||||
|
}
|
||||||
|
if clientAuth {
|
||||||
|
if accessToken == "" {
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshal arguments and decode it using decodeJSON instead to prevent loss between floats/int.
|
||||||
|
aMarshal, err := json.Marshal(toolArgument)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("unable to marshal tools argument: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
var data map[string]any
|
||||||
|
if err = util.DecodeJSON(bytes.NewBuffer(aMarshal), &data); err != nil {
|
||||||
|
err = fmt.Errorf("unable to decode tools argument: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tool authentication
|
||||||
|
// claimsFromAuth maps the name of the authservice to the claims retrieved from it.
|
||||||
|
claimsFromAuth := make(map[string]map[string]any)
|
||||||
|
|
||||||
|
// if using stdio, header will be nil and auth will not be supported
|
||||||
|
if header != nil {
|
||||||
|
for _, aS := range authServices {
|
||||||
|
claims, err := aS.GetClaimsFromHeader(ctx, header)
|
||||||
|
if err != nil {
|
||||||
|
logger.DebugContext(ctx, err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if claims == nil {
|
||||||
|
// authService not present in header
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
claimsFromAuth[aS.GetName()] = claims
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tool authorization check
|
||||||
|
verifiedAuthServices := make([]string, len(claimsFromAuth))
|
||||||
|
i := 0
|
||||||
|
for k := range claimsFromAuth {
|
||||||
|
verifiedAuthServices[i] = k
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if any of the specified auth services is verified
|
||||||
|
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||||
|
if !isAuthorized {
|
||||||
|
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, "tool invocation authorized")
|
||||||
|
|
||||||
|
params, err := parameters.ParseParams(tool.GetParameters(), data, claimsFromAuth)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||||
|
|
||||||
|
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||||
|
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// run tool invocation and generate response.
|
||||||
|
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||||
|
if err != nil {
|
||||||
|
errStr := err.Error()
|
||||||
|
// Missing authService tokens.
|
||||||
|
if errors.Is(err, util.ErrUnauthorized) {
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
// Upstream auth error
|
||||||
|
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||||
|
if clientAuth {
|
||||||
|
// Error with client credentials should pass down to the client
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
// Auth error with ADC should raise internal 500 error
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
text := TextContent{
|
||||||
|
Type: "text",
|
||||||
|
Text: err.Error(),
|
||||||
|
}
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
content := make([]TextContent, 0)
|
||||||
|
|
||||||
|
sliceRes, ok := results.([]any)
|
||||||
|
if !ok {
|
||||||
|
sliceRes = []any{results}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, d := range sliceRes {
|
||||||
|
text := TextContent{Type: "text"}
|
||||||
|
dM, err := json.Marshal(d)
|
||||||
|
if err != nil {
|
||||||
|
text.Text = fmt.Sprintf("fail to marshal: %s, result: %s", err, d)
|
||||||
|
} else {
|
||||||
|
text.Text = string(dM)
|
||||||
|
}
|
||||||
|
content = append(content, text)
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: CallToolResult{Content: content},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// promptsListHandler handles the "prompts/list" method.
|
||||||
|
func promptsListHandler(ctx context.Context, id jsonrpc.RequestId, promptset prompts.Promptset, body []byte) (any, error) {
|
||||||
|
// retrieve logger from context
|
||||||
|
logger, err := util.LoggerFromContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, "handling prompts/list request")
|
||||||
|
|
||||||
|
var req ListPromptsRequest
|
||||||
|
if err := json.Unmarshal(body, &req); err != nil {
|
||||||
|
err = fmt.Errorf("invalid mcp prompts list request: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
result := ListPromptsResult{
|
||||||
|
Prompts: promptset.McpManifest,
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, fmt.Sprintf("returning %d prompts", len(promptset.McpManifest)))
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: result,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// promptsGetHandler handles the "prompts/get" method.
|
||||||
|
func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte) (any, error) {
|
||||||
|
// retrieve logger from context
|
||||||
|
logger, err := util.LoggerFromContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, "handling prompts/get request")
|
||||||
|
|
||||||
|
var req GetPromptRequest
|
||||||
|
if err := json.Unmarshal(body, &req); err != nil {
|
||||||
|
err = fmt.Errorf("invalid mcp prompts/get request: %w", err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
promptName := req.Params.Name
|
||||||
|
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||||
|
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||||
|
if !ok {
|
||||||
|
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the arguments provided in the request.
|
||||||
|
argValues, err := prompt.ParseArgs(req.Params.Arguments, nil)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("invalid arguments for prompt %q: %w", promptName, err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, fmt.Sprintf("parsed args: %v", argValues))
|
||||||
|
|
||||||
|
// Substitute the argument values into the prompt's messages.
|
||||||
|
substituted, err := prompt.SubstituteParams(argValues)
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("error substituting params for prompt %q: %w", promptName, err)
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cast the result to the expected []prompts.Message type.
|
||||||
|
substitutedMessages, ok := substituted.([]prompts.Message)
|
||||||
|
if !ok {
|
||||||
|
err = fmt.Errorf("internal error: SubstituteParams returned unexpected type")
|
||||||
|
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||||
|
}
|
||||||
|
logger.DebugContext(ctx, "substituted params successfully")
|
||||||
|
|
||||||
|
// Format the response messages into the required structure.
|
||||||
|
promptMessages := make([]PromptMessage, len(substitutedMessages))
|
||||||
|
for i, msg := range substitutedMessages {
|
||||||
|
promptMessages[i] = PromptMessage{
|
||||||
|
Role: msg.Role,
|
||||||
|
Content: TextContent{
|
||||||
|
Type: "text",
|
||||||
|
Text: msg.Content,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result := GetPromptResult{
|
||||||
|
Description: prompt.Manifest().Description,
|
||||||
|
Messages: promptMessages,
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonrpc.JSONRPCResponse{
|
||||||
|
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||||
|
Id: id,
|
||||||
|
Result: result,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
219
internal/server/mcp/v20251125/types.go
Normal file
219
internal/server/mcp/v20251125/types.go
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
// Copyright 2026 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package v20251125
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SERVER_NAME is the server name used in Implementation.
|
||||||
|
const SERVER_NAME = "Toolbox"
|
||||||
|
|
||||||
|
// PROTOCOL_VERSION is the version of the MCP protocol in this package.
|
||||||
|
const PROTOCOL_VERSION = "2025-11-25"
|
||||||
|
|
||||||
|
// methods that are supported.
|
||||||
|
const (
|
||||||
|
PING = "ping"
|
||||||
|
TOOLS_LIST = "tools/list"
|
||||||
|
TOOLS_CALL = "tools/call"
|
||||||
|
PROMPTS_LIST = "prompts/list"
|
||||||
|
PROMPTS_GET = "prompts/get"
|
||||||
|
)
|
||||||
|
|
||||||
|
/* Empty result */
|
||||||
|
|
||||||
|
// EmptyResult represents a response that indicates success but carries no data.
|
||||||
|
type EmptyResult jsonrpc.Result
|
||||||
|
|
||||||
|
/* Pagination */
|
||||||
|
|
||||||
|
// Cursor is an opaque token used to represent a cursor for pagination.
|
||||||
|
type Cursor string
|
||||||
|
|
||||||
|
type PaginatedRequest struct {
|
||||||
|
jsonrpc.Request
|
||||||
|
Params struct {
|
||||||
|
// An opaque token representing the current pagination position.
|
||||||
|
// If provided, the server should return results starting after this cursor.
|
||||||
|
Cursor Cursor `json:"cursor,omitempty"`
|
||||||
|
} `json:"params,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PaginatedResult struct {
|
||||||
|
jsonrpc.Result
|
||||||
|
// An opaque token representing the pagination position after the last returned result.
|
||||||
|
// If present, there may be more results available.
|
||||||
|
NextCursor Cursor `json:"nextCursor,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Tools */
|
||||||
|
|
||||||
|
// Sent from the client to request a list of tools the server has.
|
||||||
|
type ListToolsRequest struct {
|
||||||
|
PaginatedRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
// The server's response to a tools/list request from the client.
|
||||||
|
type ListToolsResult struct {
|
||||||
|
PaginatedResult
|
||||||
|
Tools []tools.McpManifest `json:"tools"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used by the client to invoke a tool provided by the server.
|
||||||
|
type CallToolRequest struct {
|
||||||
|
jsonrpc.Request
|
||||||
|
Params struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Arguments map[string]any `json:"arguments,omitempty"`
|
||||||
|
} `json:"params,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// The sender or recipient of messages and data in a conversation.
|
||||||
|
type Role string
|
||||||
|
|
||||||
|
const (
|
||||||
|
RoleUser Role = "user"
|
||||||
|
RoleAssistant Role = "assistant"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Base for objects that include optional annotations for the client.
|
||||||
|
// The client can use annotations to inform how objects are used or displayed
|
||||||
|
type Annotated struct {
|
||||||
|
Annotations *struct {
|
||||||
|
// Describes who the intended customer of this object or data is.
|
||||||
|
// It can include multiple entries to indicate content useful for multiple
|
||||||
|
// audiences (e.g., `["user", "assistant"]`).
|
||||||
|
Audience []Role `json:"audience,omitempty"`
|
||||||
|
// Describes how important this data is for operating the server.
|
||||||
|
//
|
||||||
|
// A value of 1 means "most important," and indicates that the data is
|
||||||
|
// effectively required, while 0 means "least important," and indicates that
|
||||||
|
// the data is entirely optional.
|
||||||
|
//
|
||||||
|
// @TJS-type number
|
||||||
|
// @minimum 0
|
||||||
|
// @maximum 1
|
||||||
|
Priority float64 `json:"priority,omitempty"`
|
||||||
|
} `json:"annotations,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// TextContent represents text provided to or from an LLM.
|
||||||
|
type TextContent struct {
|
||||||
|
Annotated
|
||||||
|
Type string `json:"type"`
|
||||||
|
// The text content of the message.
|
||||||
|
Text string `json:"text"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// The server's response to a tool call.
|
||||||
|
//
|
||||||
|
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||||
|
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||||
|
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||||
|
// and self-correct.
|
||||||
|
//
|
||||||
|
// However, any errors in _finding_ the tool, an error indicating that the
|
||||||
|
// server does not support tool calls, or any other exceptional conditions,
|
||||||
|
// should be reported as an MCP error response.
|
||||||
|
type CallToolResult struct {
|
||||||
|
jsonrpc.Result
|
||||||
|
// Could be either a TextContent, ImageContent, or EmbeddedResources
|
||||||
|
// For Toolbox, we will only be sending TextContent
|
||||||
|
Content []TextContent `json:"content"`
|
||||||
|
// Whether the tool call ended in an error.
|
||||||
|
// If not set, this is assumed to be false (the call was successful).
|
||||||
|
//
|
||||||
|
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||||
|
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||||
|
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||||
|
// and self-correct.
|
||||||
|
//
|
||||||
|
// However, any errors in _finding_ the tool, an error indicating that the
|
||||||
|
// server does not support tool calls, or any other exceptional conditions,
|
||||||
|
// should be reported as an MCP error response.
|
||||||
|
IsError bool `json:"isError,omitempty"`
|
||||||
|
// An optional JSON object that represents the structured result of the tool call.
|
||||||
|
StructuredContent map[string]any `json:"structuredContent,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Additional properties describing a Tool to clients.
|
||||||
|
//
|
||||||
|
// NOTE: all properties in ToolAnnotations are **hints**.
|
||||||
|
// They are not guaranteed to provide a faithful description of
|
||||||
|
// tool behavior (including descriptive properties like `title`).
|
||||||
|
//
|
||||||
|
// Clients should never make tool use decisions based on ToolAnnotations
|
||||||
|
// received from untrusted servers.
|
||||||
|
type ToolAnnotations struct {
|
||||||
|
// A human-readable title for the tool.
|
||||||
|
Title string `json:"title,omitempty"`
|
||||||
|
// If true, the tool does not modify its environment.
|
||||||
|
// Default: false
|
||||||
|
ReadOnlyHint bool `json:"readOnlyHint,omitempty"`
|
||||||
|
// If true, the tool may perform destructive updates to its environment.
|
||||||
|
// If false, the tool performs only additive updates.
|
||||||
|
// (This property is meaningful only when `readOnlyHint == false`)
|
||||||
|
// Default: true
|
||||||
|
DestructiveHint bool `json:"destructiveHint,omitempty"`
|
||||||
|
// If true, calling the tool repeatedly with the same arguments
|
||||||
|
// will have no additional effect on the its environment.
|
||||||
|
// (This property is meaningful only when `readOnlyHint == false`)
|
||||||
|
// Default: false
|
||||||
|
IdempotentHint bool `json:"idempotentHint,omitempty"`
|
||||||
|
// If true, this tool may interact with an "open world" of external
|
||||||
|
// entities. If false, the tool's domain of interaction is closed.
|
||||||
|
// For example, the world of a web search tool is open, whereas that
|
||||||
|
// of a memory tool is not.
|
||||||
|
// Default: true
|
||||||
|
OpenWorldHint bool `json:"openWorldHint,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Prompts */
|
||||||
|
|
||||||
|
// Sent from the client to request a list of prompts the server has.
|
||||||
|
type ListPromptsRequest struct {
|
||||||
|
PaginatedRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
// The server's response to a prompts/list request from the client.
|
||||||
|
type ListPromptsResult struct {
|
||||||
|
PaginatedResult
|
||||||
|
Prompts []prompts.McpManifest `json:"prompts"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used by the client to get a prompt provided by the server.
|
||||||
|
type GetPromptRequest struct {
|
||||||
|
jsonrpc.Request
|
||||||
|
Params struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Arguments map[string]any `json:"arguments,omitempty"`
|
||||||
|
} `json:"params"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// The server's response to a prompts/get request from the client.
|
||||||
|
type GetPromptResult struct {
|
||||||
|
jsonrpc.Result
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
Messages []PromptMessage `json:"messages"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Describes a message returned as part of a prompt.
|
||||||
|
type PromptMessage struct {
|
||||||
|
Role string `json:"role"`
|
||||||
|
Content TextContent `json:"content"`
|
||||||
|
}
|
||||||
@@ -37,6 +37,7 @@ const jsonrpcVersion = "2.0"
|
|||||||
const protocolVersion20241105 = "2024-11-05"
|
const protocolVersion20241105 = "2024-11-05"
|
||||||
const protocolVersion20250326 = "2025-03-26"
|
const protocolVersion20250326 = "2025-03-26"
|
||||||
const protocolVersion20250618 = "2025-06-18"
|
const protocolVersion20250618 = "2025-06-18"
|
||||||
|
const protocolVersion20251125 = "2025-11-25"
|
||||||
const serverName = "Toolbox"
|
const serverName = "Toolbox"
|
||||||
|
|
||||||
var basicInputSchema = map[string]any{
|
var basicInputSchema = map[string]any{
|
||||||
@@ -485,6 +486,23 @@ func TestMcpEndpoint(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: "version 2025-11-25",
|
||||||
|
protocol: protocolVersion20251125,
|
||||||
|
idHeader: false,
|
||||||
|
initWant: map[string]any{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": "mcp-initialize",
|
||||||
|
"result": map[string]any{
|
||||||
|
"protocolVersion": "2025-11-25",
|
||||||
|
"capabilities": map[string]any{
|
||||||
|
"tools": map[string]any{"listChanged": false},
|
||||||
|
"prompts": map[string]any{"listChanged": false},
|
||||||
|
},
|
||||||
|
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
for _, vtc := range versTestCases {
|
for _, vtc := range versTestCases {
|
||||||
t.Run(vtc.name, func(t *testing.T) {
|
t.Run(vtc.name, func(t *testing.T) {
|
||||||
@@ -494,8 +512,7 @@ func TestMcpEndpoint(t *testing.T) {
|
|||||||
if sessionId != "" {
|
if sessionId != "" {
|
||||||
header["Mcp-Session-Id"] = sessionId
|
header["Mcp-Session-Id"] = sessionId
|
||||||
}
|
}
|
||||||
|
if vtc.protocol != protocolVersion20241105 && vtc.protocol != protocolVersion20250326 {
|
||||||
if vtc.protocol == protocolVersion20250618 {
|
|
||||||
header["MCP-Protocol-Version"] = vtc.protocol
|
header["MCP-Protocol-Version"] = vtc.protocol
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -64,7 +64,11 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
|
|||||||
map[string]prompts.Promptset,
|
map[string]prompts.Promptset,
|
||||||
error,
|
error,
|
||||||
) {
|
) {
|
||||||
ctx = util.WithUserAgent(ctx, cfg.Version)
|
metadataStr := cfg.Version
|
||||||
|
if len(cfg.UserAgentMetadata) > 0 {
|
||||||
|
metadataStr += "+" + strings.Join(cfg.UserAgentMetadata, "+")
|
||||||
|
}
|
||||||
|
ctx = util.WithUserAgent(ctx, metadataStr)
|
||||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
@@ -304,10 +308,14 @@ func hostCheck(allowedHosts map[string]struct{}) func(http.Handler) http.Handler
|
|||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
_, hasWildcard := allowedHosts["*"]
|
_, hasWildcard := allowedHosts["*"]
|
||||||
_, hostIsAllowed := allowedHosts[r.Host]
|
hostname := r.Host
|
||||||
|
if host, _, err := net.SplitHostPort(r.Host); err == nil {
|
||||||
|
hostname = host
|
||||||
|
}
|
||||||
|
_, hostIsAllowed := allowedHosts[hostname]
|
||||||
if !hasWildcard && !hostIsAllowed {
|
if !hasWildcard && !hostIsAllowed {
|
||||||
// Return 400 Bad Request or 403 Forbidden to block the attack
|
// Return 403 Forbidden to block the attack
|
||||||
http.Error(w, "Invalid Host header", http.StatusBadRequest)
|
http.Error(w, "Invalid Host header", http.StatusForbidden)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
next.ServeHTTP(w, r)
|
next.ServeHTTP(w, r)
|
||||||
@@ -406,7 +414,11 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
|||||||
}
|
}
|
||||||
allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
|
allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
|
||||||
for _, h := range cfg.AllowedHosts {
|
for _, h := range cfg.AllowedHosts {
|
||||||
allowedHostsMap[h] = struct{}{}
|
hostname := h
|
||||||
|
if host, _, err := net.SplitHostPort(h); err == nil {
|
||||||
|
hostname = host
|
||||||
|
}
|
||||||
|
allowedHostsMap[hostname] = struct{}{}
|
||||||
}
|
}
|
||||||
r.Use(hostCheck(allowedHostsMap))
|
r.Use(hostCheck(allowedHostsMap))
|
||||||
|
|
||||||
|
|||||||
@@ -200,3 +200,62 @@ func TestUpdateServer(t *testing.T) {
|
|||||||
t.Errorf("error updating server, promptset (-want +got):\n%s", diff)
|
t.Errorf("error updating server, promptset (-want +got):\n%s", diff)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestNameValidation(t *testing.T) {
|
||||||
|
testCases := []struct {
|
||||||
|
desc string
|
||||||
|
resourceName string
|
||||||
|
errStr string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "names with 0 length",
|
||||||
|
resourceName: "",
|
||||||
|
errStr: "resource name SHOULD be between 1 and 128 characters in length (inclusive)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with allowed length",
|
||||||
|
resourceName: "foo",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with 128 length",
|
||||||
|
resourceName: strings.Repeat("a", 128),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with more than 128 length",
|
||||||
|
resourceName: strings.Repeat("a", 129),
|
||||||
|
errStr: "resource name SHOULD be between 1 and 128 characters in length (inclusive)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with space",
|
||||||
|
resourceName: "foo bar",
|
||||||
|
errStr: "invalid character for resource name; only uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.) is allowed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with commas",
|
||||||
|
resourceName: "foo,bar",
|
||||||
|
errStr: "invalid character for resource name; only uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.) is allowed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with other special character",
|
||||||
|
resourceName: "foo!",
|
||||||
|
errStr: "invalid character for resource name; only uppercase and lowercase ASCII letters (A-Z, a-z), digits (0-9), underscore (_), hyphen (-), and dot (.) is allowed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "names with allowed special character",
|
||||||
|
resourceName: "foo_.-bar6",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.desc, func(t *testing.T) {
|
||||||
|
err := server.NameValidation(tc.resourceName)
|
||||||
|
if err != nil {
|
||||||
|
if tc.errStr != err.Error() {
|
||||||
|
t.Fatalf("unexpected error: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err == nil && tc.errStr != "" {
|
||||||
|
t.Fatalf("expect error: %s", tc.errStr)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -12,6 +12,8 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
|
import { escapeHtml } from './sanitize.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Renders the Google Sign-In button using the GIS library.
|
* Renders the Google Sign-In button using the GIS library.
|
||||||
* @param {string} toolId The ID of the tool.
|
* @param {string} toolId The ID of the tool.
|
||||||
@@ -112,13 +114,14 @@ function handleCredentialResponse(response, toolId, authProfileName) {
|
|||||||
|
|
||||||
// creates the Google Auth method dropdown
|
// creates the Google Auth method dropdown
|
||||||
export function createGoogleAuthMethodItem(toolId, authProfileName) {
|
export function createGoogleAuthMethodItem(toolId, authProfileName) {
|
||||||
|
const safeProfileName = escapeHtml(authProfileName);
|
||||||
const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`;
|
const UNIQUE_ID_BASE = `${toolId}-${authProfileName}`;
|
||||||
const item = document.createElement('div');
|
const item = document.createElement('div');
|
||||||
|
|
||||||
item.className = 'auth-method-item';
|
item.className = 'auth-method-item';
|
||||||
item.innerHTML = `
|
item.innerHTML = `
|
||||||
<div class="auth-method-header">
|
<div class="auth-method-header">
|
||||||
<span class="auth-method-label">Google ID Token (${authProfileName})</span>
|
<span class="auth-method-label">Google ID Token (${safeProfileName})</span>
|
||||||
<button class="toggle-details-tab">Auto Setup</button>
|
<button class="toggle-details-tab">Auto Setup</button>
|
||||||
</div>
|
</div>
|
||||||
<div class="auth-method-details" id="google-auth-details-${UNIQUE_ID_BASE}" style="display: none;">
|
<div class="auth-method-details" id="google-auth-details-${UNIQUE_ID_BASE}" style="display: none;">
|
||||||
|
|||||||
@@ -13,6 +13,7 @@
|
|||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
import { renderToolInterface } from "./toolDisplay.js";
|
import { renderToolInterface } from "./toolDisplay.js";
|
||||||
|
import { escapeHtml } from "./sanitize.js";
|
||||||
|
|
||||||
let toolDetailsAbortController = null;
|
let toolDetailsAbortController = null;
|
||||||
|
|
||||||
@@ -34,7 +35,7 @@ export async function loadTools(secondNavContent, toolDisplayArea, toolsetName)
|
|||||||
renderToolList(apiResponse, secondNavContent, toolDisplayArea);
|
renderToolList(apiResponse, secondNavContent, toolDisplayArea);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to load tools:', error);
|
console.error('Failed to load tools:', error);
|
||||||
secondNavContent.innerHTML = `<p class="error">Failed to load tools: <pre><code>${error}</code></pre></p>`;
|
secondNavContent.innerHTML = `<p class="error">Failed to load tools: <pre><code>${escapeHtml(String(error))}</code></pre></p>`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -168,7 +169,7 @@ async function fetchToolDetails(toolName, toolDisplayArea) {
|
|||||||
console.debug("Previous fetch was aborted, expected behavior.");
|
console.debug("Previous fetch was aborted, expected behavior.");
|
||||||
} else {
|
} else {
|
||||||
console.error(`Failed to load details for tool "${toolName}":`, error);
|
console.error(`Failed to load details for tool "${toolName}":`, error);
|
||||||
toolDisplayArea.innerHTML = `<p class="error">Failed to load details for ${toolName}. ${error.message}</p>`;
|
toolDisplayArea.innerHTML = `<p class="error">Failed to load details for ${escapeHtml(toolName)}. ${escapeHtml(error.message)}</p>`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
43
internal/server/static/js/sanitize.js
Normal file
43
internal/server/static/js/sanitize.js
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
// Copyright 2025 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Escapes special characters for safe rendering in HTML text contexts.
|
||||||
|
*
|
||||||
|
* This utility encodes user-controlled values to avoid unintended script
|
||||||
|
* execution when rendering content as HTML. It is intended as a defensive
|
||||||
|
* measure and does not perform HTML sanitization.
|
||||||
|
*
|
||||||
|
* @param {*} input The value to escape.
|
||||||
|
* @return {string} The escaped string safe for HTML rendering.
|
||||||
|
*/
|
||||||
|
const htmlEscapes = {
|
||||||
|
'&': '&',
|
||||||
|
'<': '<',
|
||||||
|
'>': '>',
|
||||||
|
'"': '"',
|
||||||
|
"'": ''',
|
||||||
|
'`': '`'
|
||||||
|
};
|
||||||
|
|
||||||
|
const escapeCharsRegex = /[&<>"'`]/g;
|
||||||
|
|
||||||
|
export function escapeHtml(input) {
|
||||||
|
if (input === null || input === undefined) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
const str = String(input);
|
||||||
|
return str.replace(escapeCharsRegex, (char) => htmlEscapes[char]);
|
||||||
|
}
|
||||||
@@ -14,6 +14,7 @@
|
|||||||
|
|
||||||
import { handleRunTool, displayResults } from './runTool.js';
|
import { handleRunTool, displayResults } from './runTool.js';
|
||||||
import { createGoogleAuthMethodItem } from './auth.js'
|
import { createGoogleAuthMethodItem } from './auth.js'
|
||||||
|
import { escapeHtml } from './sanitize.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Helper function to create form inputs for parameters.
|
* Helper function to create form inputs for parameters.
|
||||||
@@ -357,9 +358,9 @@ export function renderToolInterface(tool, containerElement) {
|
|||||||
const descBox = document.createElement('div');
|
const descBox = document.createElement('div');
|
||||||
|
|
||||||
nameBox.className = 'tool-box tool-name';
|
nameBox.className = 'tool-box tool-name';
|
||||||
nameBox.innerHTML = `<h5>Name:</h5><p>${tool.name}</p>`;
|
nameBox.innerHTML = `<h5>Name:</h5><p>${escapeHtml(tool.name)}</p>`;
|
||||||
descBox.className = 'tool-box tool-description';
|
descBox.className = 'tool-box tool-description';
|
||||||
descBox.innerHTML = `<h5>Description:</h5><p>${tool.description}</p>`;
|
descBox.innerHTML = `<h5>Description:</h5><p>${escapeHtml(tool.description)}</p>`;
|
||||||
|
|
||||||
toolInfoContainer.className = 'tool-info';
|
toolInfoContainer.className = 'tool-info';
|
||||||
toolInfoContainer.appendChild(nameBox);
|
toolInfoContainer.appendChild(nameBox);
|
||||||
|
|||||||
@@ -236,9 +236,9 @@ func setupClientCaching(s *Source, baseCreator BigqueryClientCreator) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Initialize caches
|
// Initialize caches
|
||||||
s.bqClientCache = NewCache(onBqEvict)
|
s.bqClientCache = sources.NewCache(onBqEvict)
|
||||||
s.bqRestCache = NewCache(nil)
|
s.bqRestCache = sources.NewCache(nil)
|
||||||
s.dataplexCache = NewCache(onDataplexEvict)
|
s.dataplexCache = sources.NewCache(onDataplexEvict)
|
||||||
|
|
||||||
// Create the caching wrapper for the client creator
|
// Create the caching wrapper for the client creator
|
||||||
s.ClientCreator = func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
|
s.ClientCreator = func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
|
||||||
@@ -289,9 +289,9 @@ type Source struct {
|
|||||||
Session *Session
|
Session *Session
|
||||||
|
|
||||||
// Caches for OAuth clients
|
// Caches for OAuth clients
|
||||||
bqClientCache *Cache
|
bqClientCache *sources.Cache
|
||||||
bqRestCache *Cache
|
bqRestCache *sources.Cache
|
||||||
dataplexCache *Cache
|
dataplexCache *sources.Cache
|
||||||
}
|
}
|
||||||
|
|
||||||
type Session struct {
|
type Session struct {
|
||||||
|
|||||||
@@ -11,7 +11,7 @@
|
|||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
package bigquery
|
package sources
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"sync"
|
"sync"
|
||||||
439
internal/sources/cloudloggingadmin/cloud_logging_admin.go
Normal file
439
internal/sources/cloudloggingadmin/cloud_logging_admin.go
Normal file
@@ -0,0 +1,439 @@
|
|||||||
|
// Copyright 2026 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
package cloudloggingadmin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"cloud.google.com/go/logging"
|
||||||
|
"cloud.google.com/go/logging/logadmin"
|
||||||
|
"github.com/goccy/go-yaml"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
|
"go.opentelemetry.io/otel/trace"
|
||||||
|
"golang.org/x/oauth2"
|
||||||
|
"golang.org/x/oauth2/google"
|
||||||
|
"google.golang.org/api/impersonate"
|
||||||
|
"google.golang.org/api/iterator"
|
||||||
|
"google.golang.org/api/option"
|
||||||
|
)
|
||||||
|
|
||||||
|
const SourceType string = "cloud-logging-admin"
|
||||||
|
|
||||||
|
var _ sources.SourceConfig = Config{}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
if !sources.Register(SourceType, newConfig) {
|
||||||
|
panic(fmt.Sprintf("source type %q already registered", SourceType))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
|
||||||
|
actual := Config{Name: name}
|
||||||
|
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return actual, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
Name string `yaml:"name" validate:"required"`
|
||||||
|
Type string `yaml:"type" validate:"required"`
|
||||||
|
Project string `yaml:"project" validate:"required"`
|
||||||
|
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||||
|
ImpersonateServiceAccount string `yaml:"impersonateServiceAccount"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r Config) SourceConfigType() string {
|
||||||
|
return SourceType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||||
|
|
||||||
|
if r.UseClientOAuth && r.ImpersonateServiceAccount != "" {
|
||||||
|
return nil, fmt.Errorf("useClientOAuth cannot be used with impersonateServiceAccount")
|
||||||
|
}
|
||||||
|
|
||||||
|
var client *logadmin.Client
|
||||||
|
var tokenSource oauth2.TokenSource
|
||||||
|
var clientCreator LogAdminClientCreator
|
||||||
|
var err error
|
||||||
|
|
||||||
|
s := &Source{
|
||||||
|
Config: r,
|
||||||
|
Client: client,
|
||||||
|
TokenSource: tokenSource,
|
||||||
|
ClientCreator: clientCreator,
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.UseClientOAuth {
|
||||||
|
// use client OAuth
|
||||||
|
baseClientCreator, err := newLogAdminClientCreator(ctx, tracer, r.Project, r.Name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error constructing client creator: %w", err)
|
||||||
|
}
|
||||||
|
setupClientCaching(s, baseClientCreator)
|
||||||
|
} else {
|
||||||
|
client, tokenSource, err = initLogAdminConnection(ctx, tracer, r.Name, r.Project, r.ImpersonateServiceAccount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error creating client from ADC %w", err)
|
||||||
|
}
|
||||||
|
s.Client = client
|
||||||
|
s.TokenSource = tokenSource
|
||||||
|
}
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ sources.Source = &Source{}
|
||||||
|
|
||||||
|
type LogAdminClientCreator func(tokenString string) (*logadmin.Client, error)
|
||||||
|
|
||||||
|
type Source struct {
|
||||||
|
Config
|
||||||
|
Client *logadmin.Client
|
||||||
|
TokenSource oauth2.TokenSource
|
||||||
|
ClientCreator LogAdminClientCreator
|
||||||
|
|
||||||
|
// Caches for OAuth clients
|
||||||
|
logadminClientCache *sources.Cache
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) SourceType() string {
|
||||||
|
// Returns logadmin source type
|
||||||
|
return SourceType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) ToConfig() sources.SourceConfig {
|
||||||
|
return s.Config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) UseClientAuthorization() bool {
|
||||||
|
return s.UseClientOAuth
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) LogAdminClient() *logadmin.Client {
|
||||||
|
return s.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) LogAdminTokenSource() oauth2.TokenSource {
|
||||||
|
return s.TokenSource
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) LogAdminClientCreator() LogAdminClientCreator {
|
||||||
|
return s.ClientCreator
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Source) GetProject() string {
|
||||||
|
return s.Project
|
||||||
|
}
|
||||||
|
|
||||||
|
// getClient returns the appropriate client based on authentication mode
|
||||||
|
func (s *Source) getClient(accessToken string) (*logadmin.Client, error) {
|
||||||
|
if s.UseClientOAuth {
|
||||||
|
if s.ClientCreator == nil {
|
||||||
|
return nil, fmt.Errorf("client creator is not initialized")
|
||||||
|
}
|
||||||
|
return s.ClientCreator(accessToken)
|
||||||
|
}
|
||||||
|
if s.Client == nil {
|
||||||
|
return nil, fmt.Errorf("source client is not initialized")
|
||||||
|
}
|
||||||
|
return s.Client, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListLogNames lists all log names in the project
|
||||||
|
func (s *Source) ListLogNames(ctx context.Context, limit int, accessToken string) ([]string, error) {
|
||||||
|
client, err := s.getClient(accessToken)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
it := client.Logs(ctx)
|
||||||
|
var logNames []string
|
||||||
|
for len(logNames) < limit {
|
||||||
|
logName, err := it.Next()
|
||||||
|
if err == iterator.Done {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
logNames = append(logNames, logName)
|
||||||
|
}
|
||||||
|
return logNames, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListResourceTypes lists all resource types in the project
|
||||||
|
func (s *Source) ListResourceTypes(ctx context.Context, accessToken string) ([]string, error) {
|
||||||
|
client, err := s.getClient(accessToken)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
it := client.ResourceDescriptors(ctx)
|
||||||
|
var types []string
|
||||||
|
for {
|
||||||
|
desc, err := it.Next()
|
||||||
|
if err == iterator.Done {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to list resource descriptors: %w", err)
|
||||||
|
}
|
||||||
|
types = append(types, desc.Type)
|
||||||
|
}
|
||||||
|
slices.Sort(types)
|
||||||
|
return types, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryLogsParams contains the parameters for querying logs
|
||||||
|
type QueryLogsParams struct {
|
||||||
|
Filter string
|
||||||
|
NewestFirst bool
|
||||||
|
StartTime string
|
||||||
|
EndTime string
|
||||||
|
Verbose bool
|
||||||
|
Limit int
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryLogs queries log entries based on the provided parameters
|
||||||
|
func (s *Source) QueryLogs(ctx context.Context, params QueryLogsParams, accessToken string) ([]map[string]any, error) {
|
||||||
|
client, err := s.getClient(accessToken)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build filter
|
||||||
|
var filterParts []string
|
||||||
|
if params.Filter != "" {
|
||||||
|
filterParts = append(filterParts, params.Filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add timestamp filter
|
||||||
|
startTime := params.StartTime
|
||||||
|
if startTime != "" {
|
||||||
|
filterParts = append(filterParts, fmt.Sprintf(`timestamp>="%s"`, startTime))
|
||||||
|
}
|
||||||
|
|
||||||
|
if params.EndTime != "" {
|
||||||
|
filterParts = append(filterParts, fmt.Sprintf(`timestamp<="%s"`, params.EndTime))
|
||||||
|
}
|
||||||
|
|
||||||
|
combinedFilter := strings.Join(filterParts, " AND ")
|
||||||
|
|
||||||
|
// Add opts
|
||||||
|
opts := []logadmin.EntriesOption{
|
||||||
|
logadmin.Filter(combinedFilter),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set order
|
||||||
|
if params.NewestFirst {
|
||||||
|
opts = append(opts, logadmin.NewestFirst())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set up iterator
|
||||||
|
it := client.Entries(ctx, opts...)
|
||||||
|
|
||||||
|
var results []map[string]any
|
||||||
|
for len(results) < params.Limit {
|
||||||
|
entry, err := it.Next()
|
||||||
|
if err == iterator.Done {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to iterate entries: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
result := map[string]any{
|
||||||
|
"logName": entry.LogName,
|
||||||
|
"timestamp": entry.Timestamp.Format(time.RFC3339),
|
||||||
|
"severity": entry.Severity.String(),
|
||||||
|
"resource": map[string]any{
|
||||||
|
"type": entry.Resource.Type,
|
||||||
|
"labels": entry.Resource.Labels,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.Payload != nil {
|
||||||
|
result["payload"] = entry.Payload
|
||||||
|
}
|
||||||
|
|
||||||
|
if params.Verbose {
|
||||||
|
result["insertId"] = entry.InsertID
|
||||||
|
|
||||||
|
if len(entry.Labels) > 0 {
|
||||||
|
result["labels"] = entry.Labels
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.HTTPRequest != nil {
|
||||||
|
httpRequestMap := map[string]any{
|
||||||
|
"status": entry.HTTPRequest.Status,
|
||||||
|
"latency": entry.HTTPRequest.Latency.String(),
|
||||||
|
"remoteIp": entry.HTTPRequest.RemoteIP,
|
||||||
|
}
|
||||||
|
if req := entry.HTTPRequest.Request; req != nil {
|
||||||
|
httpRequestMap["requestMethod"] = req.Method
|
||||||
|
httpRequestMap["requestUrl"] = req.URL.String()
|
||||||
|
httpRequestMap["userAgent"] = req.UserAgent()
|
||||||
|
}
|
||||||
|
result["httpRequest"] = httpRequestMap
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.Trace != "" {
|
||||||
|
result["trace"] = entry.Trace
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.SpanID != "" {
|
||||||
|
result["spanId"] = entry.SpanID
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.Operation != nil {
|
||||||
|
result["operation"] = map[string]any{
|
||||||
|
"id": entry.Operation.Id,
|
||||||
|
"producer": entry.Operation.Producer,
|
||||||
|
"first": entry.Operation.First,
|
||||||
|
"last": entry.Operation.Last,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.SourceLocation != nil {
|
||||||
|
result["sourceLocation"] = map[string]any{
|
||||||
|
"file": entry.SourceLocation.File,
|
||||||
|
"line": entry.SourceLocation.Line,
|
||||||
|
"function": entry.SourceLocation.Function,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
results = append(results, result)
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func setupClientCaching(s *Source, baseCreator LogAdminClientCreator) {
|
||||||
|
onEvict := func(key string, value interface{}) {
|
||||||
|
if client, ok := value.(*logadmin.Client); ok && client != nil {
|
||||||
|
client.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logadminClientCache = sources.NewCache(onEvict)
|
||||||
|
|
||||||
|
s.ClientCreator = func(tokenString string) (*logadmin.Client, error) {
|
||||||
|
if val, found := s.logadminClientCache.Get(tokenString); found {
|
||||||
|
return val.(*logadmin.Client), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
client, err := baseCreator(tokenString)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
s.logadminClientCache.Set(tokenString, client)
|
||||||
|
return client, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func initLogAdminConnection(
|
||||||
|
ctx context.Context,
|
||||||
|
tracer trace.Tracer,
|
||||||
|
name string,
|
||||||
|
project string,
|
||||||
|
impersonateServiceAccount string,
|
||||||
|
) (*logadmin.Client, oauth2.TokenSource, error) {
|
||||||
|
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
|
||||||
|
defer span.End()
|
||||||
|
|
||||||
|
userAgent, err := util.UserAgentFromContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var tokenSource oauth2.TokenSource
|
||||||
|
var opts []option.ClientOption
|
||||||
|
|
||||||
|
if impersonateServiceAccount != "" {
|
||||||
|
// Create impersonated credentials token source with cloud-platform scope
|
||||||
|
// This broader scope is needed for tools like conversational analytics
|
||||||
|
cloudPlatformTokenSource, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
|
||||||
|
TargetPrincipal: impersonateServiceAccount,
|
||||||
|
Scopes: []string{"https://www.googleapis.com/auth/cloud-platform"},
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
tokenSource = cloudPlatformTokenSource
|
||||||
|
opts = []option.ClientOption{
|
||||||
|
option.WithUserAgent(userAgent),
|
||||||
|
option.WithTokenSource(cloudPlatformTokenSource),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Use default credentials
|
||||||
|
cred, err := google.FindDefaultCredentials(ctx, logging.AdminScope)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", logging.AdminScope, err)
|
||||||
|
}
|
||||||
|
tokenSource = cred.TokenSource
|
||||||
|
opts = []option.ClientOption{
|
||||||
|
option.WithUserAgent(userAgent),
|
||||||
|
option.WithCredentials(cred),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
client, err := logadmin.NewClient(ctx, project, opts...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("failed to create Cloud Logging Admin client for project %q: %w", project, err)
|
||||||
|
}
|
||||||
|
return client, tokenSource, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func initLogAdminConnectionWithOAuthToken(
|
||||||
|
ctx context.Context,
|
||||||
|
tracer trace.Tracer,
|
||||||
|
project, name, userAgent, tokenString string,
|
||||||
|
) (*logadmin.Client, error) {
|
||||||
|
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceType, name)
|
||||||
|
defer span.End()
|
||||||
|
|
||||||
|
token := &oauth2.Token{
|
||||||
|
AccessToken: string(tokenString),
|
||||||
|
}
|
||||||
|
ts := oauth2.StaticTokenSource(token)
|
||||||
|
|
||||||
|
// Initialize the logadmin client with tokenSource
|
||||||
|
client, err := logadmin.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create logadmin client for project %q: %w", project, err)
|
||||||
|
}
|
||||||
|
return client, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func newLogAdminClientCreator(
|
||||||
|
ctx context.Context,
|
||||||
|
tracer trace.Tracer,
|
||||||
|
project, name string,
|
||||||
|
) (LogAdminClientCreator, error) {
|
||||||
|
userAgent, err := util.UserAgentFromContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(tokenString string) (*logadmin.Client, error) {
|
||||||
|
return initLogAdminConnectionWithOAuthToken(ctx, tracer, project, name, userAgent, tokenString)
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
137
internal/sources/cloudloggingadmin/cloud_logging_admin_test.go
Normal file
137
internal/sources/cloudloggingadmin/cloud_logging_admin_test.go
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
// Copyright 2026 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
package cloudloggingadmin_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/google/go-cmp/cmp"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/server"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
|
||||||
|
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestParseFromYamlCloudLoggingAdmin verifies that well-formed
// cloud-logging-admin source YAML unmarshals into the expected Config:
// the minimal required fields, the useClientOAuth flag, and service
// account impersonation.
func TestParseFromYamlCloudLoggingAdmin(t *testing.T) {
	tcs := []struct {
		desc string                // test case name
		in   string                // raw YAML fed through testutils.FormatYaml
		want server.SourceConfigs  // expected parsed source configs
	}{
		{
			desc: "basic example",
			in: `
kind: sources
name: my-instance
type: cloud-logging-admin
project: my-project
`,
			want: server.SourceConfigs{
				"my-instance": cloudloggingadmin.Config{
					Name:    "my-instance",
					Type:    cloudloggingadmin.SourceType,
					Project: "my-project",
				},
			},
		},
		{
			desc: "with client oauth",
			in: `
kind: sources
name: my-instance
type: cloud-logging-admin
project: my-project
useClientOAuth: true
`,
			want: server.SourceConfigs{
				"my-instance": cloudloggingadmin.Config{
					Name:           "my-instance",
					Type:           cloudloggingadmin.SourceType,
					Project:        "my-project",
					UseClientOAuth: true,
				},
			},
		},
		{
			desc: "with service account impersonation",
			in: `
kind: sources
name: my-instance
type: cloud-logging-admin
project: my-project
impersonateServiceAccount: service-account@my-project.iam.gserviceaccount.com
`,
			want: server.SourceConfigs{
				"my-instance": cloudloggingadmin.Config{
					Name:                      "my-instance",
					Type:                      cloudloggingadmin.SourceType,
					Project:                   "my-project",
					ImpersonateServiceAccount: "service-account@my-project.iam.gserviceaccount.com",
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			// Only the source configs (first return) matter for these cases.
			got, _, _, _, _, _, err := server.UnmarshalResourceConfig(context.Background(), testutils.FormatYaml(tc.in))
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got)
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestFailParseFromYaml verifies that invalid cloud-logging-admin source
// YAML is rejected with the exact expected error message: an unknown field
// and a missing required Project field.
func TestFailParseFromYaml(t *testing.T) {
	tcs := []struct {
		desc string // test case name
		in   string // raw YAML fed through testutils.FormatYaml
		err  string // exact expected error string
	}{
		{
			desc: "extra field",
			in: `
kind: sources
name: my-instance
type: cloud-logging-admin
project: my-project
foo: bar
`,
			// NOTE(review): this string must match the parser's error output
			// byte-for-byte, including the annotated-snippet layout.
			err: "error unmarshaling sources: unable to parse source \"my-instance\" as \"cloud-logging-admin\": [1:1] unknown field \"foo\"\n> 1 | foo: bar\n ^\n 2 | name: my-instance\n 3 | project: my-project\n 4 | type: cloud-logging-admin",
		},
		{
			desc: "missing required field",
			in: `
kind: sources
name: my-instance
type: cloud-logging-admin
`,
			err: "error unmarshaling sources: unable to parse source \"my-instance\" as \"cloud-logging-admin\": Key: 'Config.Project' Error:Field validation for 'Project' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			_, _, _, _, _, _, err := server.UnmarshalResourceConfig(context.Background(), testutils.FormatYaml(tc.in))
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			// Compare the full error text, not just a substring, so message
			// regressions are caught.
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}
|
||||||
@@ -19,6 +19,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"text/template"
|
"text/template"
|
||||||
"time"
|
"time"
|
||||||
@@ -36,7 +37,10 @@ import (
|
|||||||
|
|
||||||
const SourceType string = "cloud-sql-admin"
|
const SourceType string = "cloud-sql-admin"
|
||||||
|
|
||||||
var targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
|
var (
|
||||||
|
targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
|
||||||
|
backupDRRegex = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/backupVaults/([^/]+)/dataSources/([^/]+)/backups/([^/]+)$`)
|
||||||
|
)
|
||||||
|
|
||||||
// validate interface
|
// validate interface
|
||||||
var _ sources.SourceConfig = Config{}
|
var _ sources.SourceConfig = Config{}
|
||||||
@@ -374,6 +378,48 @@ func (s *Source) InsertBackupRun(ctx context.Context, project, instance, locatio
|
|||||||
return resp, nil
|
return resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *Source) RestoreBackup(ctx context.Context, targetProject, targetInstance, sourceProject, sourceInstance, backupID, accessToken string) (any, error) {
|
||||||
|
request := &sqladmin.InstancesRestoreBackupRequest{}
|
||||||
|
|
||||||
|
// There are 3 scenarios for the backup identifier:
|
||||||
|
// 1. The identifier is an int64 containing the timestamp of the BackupRun.
|
||||||
|
// This is used to restore standard backups, and the RestoreBackupContext
|
||||||
|
// field should be populated with the backup ID and source instance info.
|
||||||
|
// 2. The identifier is a string of the format
|
||||||
|
// 'projects/{project-id}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup-uid}'.
|
||||||
|
// This is used to restore BackupDR backups, and the BackupdrBackup field
|
||||||
|
// should be populated.
|
||||||
|
// 3. The identifer is a string of the format
|
||||||
|
// 'projects/{project-id}/backups/{backup-uid}'. In this case, the Backup
|
||||||
|
// field should be populated.
|
||||||
|
if backupRunID, err := strconv.ParseInt(backupID, 10, 64); err == nil {
|
||||||
|
if sourceProject == "" || targetInstance == "" {
|
||||||
|
return nil, fmt.Errorf("source project and instance are required when restoring via backup ID")
|
||||||
|
}
|
||||||
|
request.RestoreBackupContext = &sqladmin.RestoreBackupContext{
|
||||||
|
Project: sourceProject,
|
||||||
|
InstanceId: sourceInstance,
|
||||||
|
BackupRunId: backupRunID,
|
||||||
|
}
|
||||||
|
} else if backupDRRegex.MatchString(backupID) {
|
||||||
|
request.BackupdrBackup = backupID
|
||||||
|
} else {
|
||||||
|
request.Backup = backupID
|
||||||
|
}
|
||||||
|
|
||||||
|
service, err := s.GetService(ctx, string(accessToken))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := service.Instances.RestoreBackup(targetProject, targetInstance, request).Do()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error restoring backup: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
func generateCloudSQLConnectionMessage(ctx context.Context, source *Source, logger log.Logger, opResponse map[string]any, connectionMessageTemplate string) (string, bool) {
|
func generateCloudSQLConnectionMessage(ctx context.Context, source *Source, logger log.Logger, opResponse map[string]any, connectionMessageTemplate string) (string, bool) {
|
||||||
operationType, ok := opResponse["operationType"].(string)
|
operationType, ok := opResponse["operationType"].(string)
|
||||||
if !ok || operationType != "CREATE_DATABASE" {
|
if !ok || operationType != "CREATE_DATABASE" {
|
||||||
|
|||||||
@@ -26,7 +26,9 @@ import (
|
|||||||
"github.com/googleapis/genai-toolbox/internal/util"
|
"github.com/googleapis/genai-toolbox/internal/util"
|
||||||
"go.opentelemetry.io/otel/trace"
|
"go.opentelemetry.io/otel/trace"
|
||||||
"golang.org/x/oauth2/google"
|
"golang.org/x/oauth2/google"
|
||||||
|
"google.golang.org/api/iterator"
|
||||||
"google.golang.org/api/option"
|
"google.golang.org/api/option"
|
||||||
|
grpcstatus "google.golang.org/grpc/status"
|
||||||
)
|
)
|
||||||
|
|
||||||
const SourceType string = "dataplex"
|
const SourceType string = "dataplex"
|
||||||
@@ -173,9 +175,18 @@ func (s *Source) SearchAspectTypes(ctx context.Context, query string, pageSize i
|
|||||||
var results []*dataplexpb.AspectType
|
var results []*dataplexpb.AspectType
|
||||||
for {
|
for {
|
||||||
entry, err := it.Next()
|
entry, err := it.Next()
|
||||||
if err != nil {
|
|
||||||
|
if err == iterator.Done {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
if err != nil {
|
||||||
|
if st, ok := grpcstatus.FromError(err); ok {
|
||||||
|
errorCode := st.Code()
|
||||||
|
errorMessage := st.Message()
|
||||||
|
return nil, fmt.Errorf("failed to search aspect types with error code: %q message: %s", errorCode.String(), errorMessage)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to search aspect types: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
// Create an instance of exponential backoff with default values for retrying GetAspectType calls
|
// Create an instance of exponential backoff with default values for retrying GetAspectType calls
|
||||||
// InitialInterval, RandomizationFactor, Multiplier, MaxInterval = 500 ms, 0.5, 1.5, 60 s
|
// InitialInterval, RandomizationFactor, Multiplier, MaxInterval = 500 ms, 0.5, 1.5, 60 s
|
||||||
@@ -214,9 +225,17 @@ func (s *Source) SearchEntries(ctx context.Context, query string, pageSize int,
|
|||||||
var results []*dataplexpb.SearchEntriesResult
|
var results []*dataplexpb.SearchEntriesResult
|
||||||
for {
|
for {
|
||||||
entry, err := it.Next()
|
entry, err := it.Next()
|
||||||
if err != nil {
|
if err == iterator.Done {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
if err != nil {
|
||||||
|
if st, ok := grpcstatus.FromError(err); ok {
|
||||||
|
errorCode := st.Code()
|
||||||
|
errorMessage := st.Message()
|
||||||
|
return nil, fmt.Errorf("failed to search entries with error code: %q message: %s", errorCode.String(), errorMessage)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to search entries: %w", err)
|
||||||
|
}
|
||||||
results = append(results, entry)
|
results = append(results, entry)
|
||||||
}
|
}
|
||||||
return results, nil
|
return results, nil
|
||||||
|
|||||||
@@ -162,11 +162,6 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
|||||||
return source.CreateCluster(ctx, project, location, network, user, password, clusterID, string(accessToken))
|
return source.CreateCluster(ctx, project, location, network, user, password, clusterID, string(accessToken))
|
||||||
}
|
}
|
||||||
|
|
||||||
// ParseParams parses the parameters for the tool.
|
|
||||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
|
|
||||||
return parameters.ParseParams(t.AllParams, data, claims)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||||
}
|
}
|
||||||
@@ -198,3 +193,7 @@ func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (boo
|
|||||||
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
||||||
return "Authorization", nil
|
return "Authorization", nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t Tool) GetParameters() parameters.Parameters {
|
||||||
|
return t.AllParams
|
||||||
|
}
|
||||||
|
|||||||
@@ -168,11 +168,6 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
|||||||
return source.CreateInstance(ctx, project, location, cluster, instanceID, instanceType, displayName, nodeCount, string(accessToken))
|
return source.CreateInstance(ctx, project, location, cluster, instanceID, instanceType, displayName, nodeCount, string(accessToken))
|
||||||
}
|
}
|
||||||
|
|
||||||
// ParseParams parses the parameters for the tool.
|
|
||||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
|
|
||||||
return parameters.ParseParams(t.AllParams, data, claims)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||||
}
|
}
|
||||||
@@ -204,3 +199,7 @@ func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (boo
|
|||||||
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
||||||
return "Authorization", nil
|
return "Authorization", nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t Tool) GetParameters() parameters.Parameters {
|
||||||
|
return t.AllParams
|
||||||
|
}
|
||||||
|
|||||||
@@ -173,11 +173,6 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
|||||||
return source.CreateUser(ctx, userType, password, roles, string(accessToken), project, location, cluster, userID)
|
return source.CreateUser(ctx, userType, password, roles, string(accessToken), project, location, cluster, userID)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ParseParams parses the parameters for the tool.
|
|
||||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
|
|
||||||
return parameters.ParseParams(t.AllParams, data, claims)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||||
}
|
}
|
||||||
@@ -209,3 +204,7 @@ func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (boo
|
|||||||
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
||||||
return "Authorization", nil
|
return "Authorization", nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t Tool) GetParameters() parameters.Parameters {
|
||||||
|
return t.AllParams
|
||||||
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user