Mirror of https://github.com/googleapis/genai-toolbox.git
Synced 2026-01-23 06:18:02 -05:00

Compare commits: new-image-...config-res (73 commits)

| SHA1 |
| --- |
| bbbf377a1f |
| f6fef90359 |
| 87c5953b75 |
| 7f0c49a4df |
| ad8df40791 |
| c29355ff82 |
| 70f5550910 |
| 348c9fde08 |
| aef539bcf3 |
| 8e0fb03483 |
| 2817dd1e5d |
| 68a218407e |
| d69792d843 |
| 647b04d3a7 |
| 030df9766f |
| 5dbf207162 |
| 9c3720e31d |
| 3cd3c39d66 |
| 0691a6f715 |
| 467b96a23b |
| 4abf0c39e7 |
| dd7b9de623 |
| 41b518b955 |
| 6e8a9eb8ec |
| ef8f3b02f2 |
| 351b007fe3 |
| 9d1feca108 |
| 17b41f6453 |
| a7799757c9 |
| d961e373e1 |
| bcb40a720d |
| 4a4cf1e712 |
| b706b5bc68 |
| 4d3332d37d |
| 1203b7370a |
| 4a26ce3c1b |
| 306b5becda |
| a4506009b9 |
| 17b70ccaa7 |
| 001d634de1 |
| 268700bdbf |
| eb793398cd |
| cf0fc515b5 |
| 9c62f313ff |
| 731a32e536 |
| 53885e6c0d |
| b4346dcb8f |
| 0f27f956c7 |
| f9df2635c6 |
| c1b87e209f |
| 55eb958c2a |
| 20447746e1 |
| 83670dbe34 |
| df2f6a9f0b |
| 2fa9cdb522 |
| 285cdcd69a |
| 38d127a354 |
| 3d140a657e |
| 0714d3e126 |
| 0baffff3b5 |
| f87ed05aac |
| a35f64ef7d |
| 980600f31b |
| f4c22b3e27 |
| c2df6223e6 |
| 4d6f70b55e |
| c088d4ed42 |
| 0202709efc |
| 9695fc5eeb |
| 5447c94ca8 |
| 7053fbb195 |
| 5a09d38056 |
| 967a72da11 |

@@ -825,7 +825,27 @@ steps:
elasticsearch \
elasticsearch

- id: "snowflake"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "SNOWFLAKE_DATABASE=$_SNOWFLAKE_DATABASE"
- "SNOWFLAKE_SCHEMA=$_SNOWFLAKE_SCHEMA"
secretEnv: ["CLIENT_ID", "SNOWFLAKE_USER", "SNOWFLAKE_PASS", "SNOWFLAKE_ACCOUNT"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Snowflake" \
snowflake \
snowflake

- id: "cassandra"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -875,8 +895,8 @@ steps:
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
echo "Oracle total coverage: $total_coverage"
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 30)}'; then
echo "Coverage failure: $total_coverage is below 30%."
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
echo "Coverage failure: $total_coverage is below 20%."
exit 1
fi
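
For context on the threshold check above: `awk -v cov=... 'BEGIN {exit !(cov < N)}'` exits with status 0 exactly when the coverage value is below `N`, so the `if` branch fires and the build fails. A standalone sketch of the same idiom follows; the sample coverage value is invented for illustration.

```sh
#!/bin/bash
# Sketch of the coverage gate above; the sample coverage value is made up.
total_coverage="42.5%"
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')

# awk exits 0 (success) when cov < 20, so the if-body runs and the build fails.
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
    echo "Coverage failure: $total_coverage is below 20%."
    exit 1
fi
echo "Coverage OK: $total_coverage"
```
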
@@ -1038,6 +1058,12 @@ availableSecrets:
env: ELASTICSEARCH_USER
- versionName: projects/$PROJECT_ID/secrets/elastic_search_pass/versions/latest
env: ELASTICSEARCH_PASS
- versionName: projects/$PROJECT_ID/secrets/snowflake_account/versions/latest
env: SNOWFLAKE_ACCOUNT
- versionName: projects/$PROJECT_ID/secrets/snowflake_user/versions/latest
env: SNOWFLAKE_USER
- versionName: projects/$PROJECT_ID/secrets/snowflake_pass/versions/latest
env: SNOWFLAKE_PASS
- versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
env: CASSANDRA_USER
- versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest
@@ -1124,4 +1150,5 @@ substitutions:
_SINGLESTORE_USER: "root"
_MARIADB_PORT: "3307"
_MARIADB_DATABASE: test_database

_SNOWFLAKE_DATABASE: "test"
_SNOWFLAKE_SCHEMA: "PUBLIC"

6 .github/workflows/deploy_dev_docs.yaml (vendored)

@@ -40,7 +40,7 @@ jobs:
group: docs-deployment
cancel-in-progress: false
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

@@ -51,12 +51,12 @@ jobs:
extended: true

- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

- name: Cache dependencies
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

@@ -30,14 +30,14 @@ jobs:

steps:
- name: Checkout main branch (for latest templates and theme)
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: 'main'
submodules: 'recursive'
fetch-depth: 0

- name: Checkout old content from tag into a temporary directory
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.inputs.version_tag }}
path: 'old_version_source' # Checkout into a temp subdir
@@ -57,7 +57,7 @@ jobs:
with:
hugo-version: "0.145.0"
extended: true
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

4 .github/workflows/deploy_versioned_docs.yaml (vendored)

@@ -30,7 +30,7 @@ jobs:
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.release.tag_name }}

@@ -44,7 +44,7 @@ jobs:
extended: true

- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

2 .github/workflows/docs_preview_clean.yaml (vendored)

@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: versioned-gh-pages

6 .github/workflows/docs_preview_deploy.yaml (vendored)

@@ -49,7 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
@@ -62,12 +62,12 @@ jobs:
extended: true

- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

- name: Cache dependencies
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

59 .github/workflows/link_checker_workflow.yaml (vendored, new file)

@@ -0,0 +1,59 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Link Checker

on:
pull_request:

jobs:
link-check:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6

- name: Restore lychee cache
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: .lycheecache
key: cache-lychee-${{ github.sha }}
restore-keys: cache-lychee-

- name: Link Checker
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
with:
args: >
--verbose
--no-progress
--cache
--max-cache-age 1d
README.md
docs/
output: /tmp/foo.txt
fail: true
jobSummary: true
debug: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# This step only runs if the 'lychee_check' step fails, ensuring the
# context note only appears when the developer needs to troubleshoot.
- name: Display Link Context Note on Failure
if: ${{ failure() }}
run: |
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
echo "---" >> $GITHUB_STEP_SUMMARY

4 .github/workflows/lint.yaml (vendored)

@@ -55,7 +55,7 @@ jobs:
with:
go-version: "1.25"
- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -66,7 +66,7 @@ jobs:
run: |
go mod tidy && git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0
with:
version: latest
args: --timeout 10m

2 .github/workflows/publish-mcp.yml (vendored)

@@ -29,7 +29,7 @@ jobs:

steps:
- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6

- name: Wait for image in Artifact Registry
shell: bash

2 .github/workflows/sync-labels.yaml (vendored)

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

2 .github/workflows/tests.yaml (vendored)

@@ -62,7 +62,7 @@ jobs:
go-version: "1.24"

- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

2 .gitignore (vendored)

@@ -20,4 +20,4 @@ node_modules

# executable
genai-toolbox
toolbox
toolbox
@@ -51,6 +51,10 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown

[[params.versions]]
version = "v0.25.0"
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"

[[params.versions]]
version = "v0.24.0"
url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"

45 .lycheeignore (new file)

@@ -0,0 +1,45 @@
# Ignore documentation placeholders and generic example domains
^https?://([a-zA-Z0-9-]+\.)?example\.com(:\d+)?(/.*)?$
^http://example\.net

# Shields.io badges often trigger rate limits or intermittent 503s
^https://img\.shields\.io/.*

# PDF files are ignored as lychee cannot reliably parse internal PDF links
\.pdf$

# Standard mailto: protocol is not a web URL
^mailto:

# Ignore local development endpoints that won't resolve in CI/CD environments
^https?://(127\.0\.0\.1|localhost)(:\d+)?(/.*)?$

# Placeholder for Google Cloud Run service discovery
https://cloud-run-url.app/

# DGraph Cloud and private instance endpoints
https://xxx.cloud.dgraph.io/
https://cloud.dgraph.io/login
https://dgraph.io/docs

# MySQL Community downloads and main site (often protected by bot mitigation)
https://dev.mysql.com/downloads/installer/
https://www.mysql.com/

# Claude desktop download link
https://claude.ai/download

# Google Cloud Run product page
https://cloud.google.com/run

# These specific deep links are known to cause redirect loops or 403s in automated scrapers
https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html

# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
https://www.npmjs.com/package/@toolbox-sdk/core
https://www.npmjs.com/package/@toolbox-sdk/adk

# Ignore social media and blog profiles to reduce external request overhead
https://medium.com/@mcp_toolbox

25 CHANGELOG.md

@@ -1,5 +1,30 @@
# Changelog

## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)

### Features

* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
* **tools/postgressql:** Add tool to list store procedure ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))

### Bug Fixes

* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))

## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)

@@ -59,6 +59,13 @@ You can manually trigger the bot by commenting on your Pull Request:
* `/gemini summary`: Posts a summary of the changes in the pull request.
* `/gemini help`: Overview of the available commands

## Guidelines for Pull Requests

1. Please keep your PR small for more thorough review and easier updates. In case of regression, it also allows us to roll back a single feature instead of multiple ones.
1. For non-trivial changes, consider opening an issue and discussing it with the code owners first.
1. Provide a good PR description as a record of what change is being made and why it was made. Link to a GitHub issue if it exists.
1. Make sure your code is thoroughly tested with unit tests and integration tests. Remember to clean up the test instances properly in your code to avoid memory leaks.

## Adding a New Database Source or Tool

Please create an
@@ -85,11 +92,11 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
`newdb.go`. Create a `Config` struct to include all the necessary parameters
for connecting to the database (e.g., host, port, username, password, database
name) and a `Source` struct to store necessary parameters for tools (e.g.,
Name, Kind, connection object, additional config).
Name, Type, connection object, additional config).
* **Implement the
[`SourceConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L57)
interface**. This interface requires two methods (a minimal skeleton is sketched after this list):
* `SourceConfigKind() string`: Returns a unique string identifier for your
* `SourceConfigType() string`: Returns a unique string identifier for your
data source (e.g., `"newdb"`).
* `Initialize(ctx context.Context, tracer trace.Tracer) (Source, error)`:
Creates a new instance of your data source and establishes a connection to
@@ -97,7 +104,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement the
[`Source`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/sources/sources.go#L63)
interface**. This interface requires one method:
* `SourceKind() string`: Returns the same string identifier as `SourceConfigKind()`.
* `SourceType() string`: Returns the same string identifier as `SourceConfigType()`.
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
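
As orientation only, here is a minimal sketch of such a skeleton. It assumes nothing beyond the two methods quoted above; the struct fields, the `SourceType` constant, and the concrete `*Source` return type are illustrative assumptions rather than the toolbox's actual API, so follow the linked example implementation (and register the config in `init()`) as the real sources do.

```go
// Hypothetical internal/sources/newdb/newdb.go skeleton (illustration only).
package newdb

import (
	"context"

	"go.opentelemetry.io/otel/trace"
)

// SourceType is the unique identifier returned by SourceConfigType/SourceType.
const SourceType = "newdb"

// Config holds the connection parameters parsed from the tools file.
type Config struct {
	Name     string `yaml:"name"`
	Type     string `yaml:"type"`
	Host     string `yaml:"host"`
	Port     int    `yaml:"port"`
	User     string `yaml:"user"`
	Password string `yaml:"password"`
	Database string `yaml:"database"`
}

// SourceConfigType returns the unique identifier for this kind of source.
func (c Config) SourceConfigType() string { return SourceType }

// Initialize establishes (and should verify) the database connection and
// returns the Source that tools will execute against.
func (c Config) Initialize(ctx context.Context, tracer trace.Tracer) (*Source, error) {
	// ... open the connection with c.Host, c.Port, etc., and ping it ...
	return &Source{Name: c.Name}, nil
}

// Source stores whatever tools need at runtime (name, connection handle, ...).
type Source struct {
	Name string
}

// SourceType returns the same identifier as SourceConfigType.
func (s *Source) SourceType() string { return SourceType }
```
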
@@ -110,6 +117,8 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

Remember to keep your PRs small. For example, if you are contributing a new Source, only include one or two core Tools within the same PR; the rest of the Tools can come in subsequent PRs.

* **Create a new directory** under `internal/tools` for your tool type (e.g., `internal/tools/newdb/newdbtool`).
* **Define a configuration struct** for your tool in a file named `newdbtool.go`.
Create a `Config` struct and a `Tool` struct to store necessary parameters for
@@ -117,7 +126,7 @@ tools.
* **Implement the
[`ToolConfig`](https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/internal/tools/tools.go#L61)
interface**. This interface requires one method (see the sketch after this list):
* `ToolConfigKind() string`: Returns a unique string identifier for your tool
* `ToolConfigType() string`: Returns a unique string identifier for your tool
(e.g., `"newdb-tool"`).
* `Initialize(sources map[string]Source) (Tool, error)`: Creates a new
instance of your tool and validates that it can connect to the specified
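
As with the source skeleton above, the following is only an illustrative sketch of the tool side using the method names quoted in this hunk; the `Config` fields, the `ToolType` constant, and the `map[string]any` stand-in for the toolbox's real `Source` type are assumptions, not the project's actual API.

```go
// Hypothetical internal/tools/newdb/newdbtool/newdbtool.go skeleton (illustration only).
package newdbtool

import "fmt"

// ToolType is the unique identifier returned by ToolConfigType.
const ToolType = "newdb-tool"

// Config holds the tool definition parsed from the tools file.
type Config struct {
	Name        string `yaml:"name"`
	Type        string `yaml:"type"`
	Source      string `yaml:"source"`
	Description string `yaml:"description"`
	Statement   string `yaml:"statement"`
}

// ToolConfigType returns the unique identifier for this kind of tool.
func (c Config) ToolConfigType() string { return ToolType }

// Initialize checks that the referenced source exists and returns the Tool.
// (The real signature takes the toolbox's Source type; `any` is a stand-in.)
func (c Config) Initialize(sources map[string]any) (*Tool, error) {
	if _, ok := sources[c.Source]; !ok {
		return nil, fmt.Errorf("source %q not found", c.Source)
	}
	return &Tool{Name: c.Name, Statement: c.Statement}, nil
}

// Tool is the runtime object that executes the statement against its source.
type Tool struct {
	Name      string
	Statement string
}
```
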
@@ -163,6 +172,8 @@ tools.
parameters][temp-param-doc]. Only run this test if template
parameters apply to your tool.

* **Add additional tests** for the tools that are not covered by the predefined tests. Every tool must be tested!

* **Add the new database to the integration test workflow** in
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).

@@ -232,7 +243,7 @@ resources.
| style | Update src code, with only formatting and whitespace updates (e.g. code formatter or linter changes). |

Pull requests should always add scope whenever possible. The scope is
formatted as `<scope-type>/<scope-kind>` (e.g., `sources/postgres`, or
formatted as `<scope-resource>/<scope-type>` (e.g., `sources/postgres`, or
`tools/mssql-sql`).

Ideally, **each PR covers only one scope**, if this is
@@ -244,4 +255,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
include a concise description of the changes, its impact, along with a
summary of the solution. If the PR is related to a specific issue, the issue
number should be mentioned in the PR description (e.g. `Fixes #1`).
number should be mentioned in the PR description (e.g. `Fixes #1`).

59 DEVELOPER.md

@@ -47,12 +47,13 @@ Before you begin, ensure you have the following:
### Tool Naming Conventions

This section details the purpose and conventions for MCP Toolbox's tools naming
properties, **tool name** and **tool kind**.
properties, **tool name** and **tool type**.

```
cancel_hotel: <- tool name
kind: postgres-sql <- tool kind
source: my_pg_source
kind: tools
name: cancel_hotel <- tool name
type: postgres-sql <- tool type
source: my_pg_source
```

#### Tool Name
@@ -76,17 +77,17 @@ The following guidelines apply to tool names:
to a function) until they can be validated through extensive testing to ensure
they do not negatively impact the agent's performance.

#### Tool Kind
#### Tool Type

Tool kind serves as a category or type that a user can assign to a tool.
Tool type serves as a category or type that a user can assign to a tool.

The following guidelines apply to tool kinds:
The following guidelines apply to tool types:

* Should use hyphens over underscores (e.g. `firestore-list-collections` over
`firestore_list_collections`).
* Should use product name in name (e.g. `firestore-list-collections` over
`list-collections`).
* Changes to tool kind are breaking changes and should be avoided.
* Changes to tool type are breaking changes and should be avoided.

## Testing

@@ -207,6 +208,30 @@ variables for each source.
* SQLite - setup in the integration test, where we create a temporary database
file

### Link Checking and Fixing with Lychee

We use **[lychee](https://github.com/lycheeverse/lychee-action)** for repository link checks.

* To run the checker **locally**, see the [command-line usage guide](https://github.com/lycheeverse/lychee?tab=readme-ov-file#commandline-usage). A sketch of a local invocation is shown below.
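
A local invocation that mirrors the arguments the CI workflow (added earlier in this compare) passes to the lychee action might look like the following; it assumes the `lychee` binary is installed locally and is run from the repository root.

```sh
# Same flags and targets as the lychee-action step in link_checker_workflow.yaml.
lychee --verbose --no-progress --cache --max-cache-age 1d README.md docs/
```

lychee also picks up the repository's `.lycheeignore` by default, so exclusions made for CI apply to local runs as well.
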
#### Fixing Broken Links

1. **Update the Link:** Correct the broken URL or update the content where it is used.
2. **Ignore the Link:** If you can't fix the link (e.g., due to **external rate-limits** or if it's a **local-only URL**), tell Lychee to **ignore** it.

* List **regular expressions** or **direct links** in the **[.lycheeignore](https://github.com/googleapis/genai-toolbox/blob/main/.lycheeignore)** file, one entry per line.
* **Always add a comment** explaining **why** the link is being skipped to prevent link rot. **Example `.lycheeignore`:**
```text
# These are email addresses, not standard web URLs, and usually cause check failures.
^mailto:.*
```
> [!NOTE]
> To avoid build failures in GitHub Actions, follow the linking pattern demonstrated here: <br>
> **Avoid:** (Works in Hugo, breaks Link Checker): `[Read more](docs/setup)` or `[Read more](docs/setup/)` <br>
> **Reason:** The link checker cannot find a file named "setup" or a directory with that name containing an index. <br>
> **Preferred:** `[Read more](docs/setup.md)` <br>
> **Reason:** The GitHub Action finds the physical file. Hugo then uses its internal logic (or render hooks) to resolve this to the correct `/docs/setup/` web URL. <br>

### Other GitHub Checks

* License header check (`.github/header-checker-lint.yml`) - Ensures files have
@@ -280,6 +305,7 @@ There are 3 GHA workflows we use to achieve document versioning:
Request a repo owner to run the preview deployment workflow on your PR. A
preview link will be automatically added as a comment to your PR.

#### Maintainers

1. **Inspect Changes:** Review the proposed changes in the PR to ensure they are
@@ -354,6 +380,23 @@ to approve PRs for main. TeamSync is used to create this team from the MDB
Group `toolbox-contributors`. Googlers who are developing for MCP-Toolbox
but aren't part of the core team should join this group.

### Issue/PR Triage and SLO
After an issue is created, maintainers will assign the following labels:
* `Priority` (defaulted to P0)
* `Type` (if applicable)
* `Product` (if applicable)

All incoming issues and PRs will follow the SLO below:
| Type | Priority | Objective |
|-----------------|----------|------------------------------------------------------------------------|
| Feature Request | P0 | Must respond within **5 days** |
| Process | P0 | Must respond within **5 days** |
| Bugs | P0 | Must respond within **5 days**, and resolve/closure within **14 days** |
| Bugs | P1 | Must respond within **7 days**, and resolve/closure within **90 days** |
| Bugs | P2 | Must respond within **30 days** |

_Types that are not listed in the table do not adhere to any SLO._

### Releasing

Toolbox has two types of releases: versioned and continuous. It uses Google

64 README.md

@@ -140,7 +140,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.24.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```
@@ -153,7 +153,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.24.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```
@@ -166,7 +166,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.24.0
> export VERSION=0.25.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```
@@ -179,7 +179,7 @@ To install Toolbox as a binary:
>
> ```cmd
> :: see releases page for other versions
> set VERSION=0.24.0
> set VERSION=0.25.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
@@ -191,7 +191,7 @@ To install Toolbox as a binary:
>
> ```powershell
> # see releases page for other versions
> $VERSION = "0.24.0"
> $VERSION = "0.25.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> ```
>
@@ -204,7 +204,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.24.0
export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -228,7 +228,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.24.0
go install github.com/googleapis/genai-toolbox@v0.25.0
```
<!-- {x-release-please-end} -->

@@ -272,7 +272,7 @@ To run Toolbox from binary:
To run the server after pulling the [container image](#installing-the-server):

```sh
export VERSION=0.11.0 # Use the version you pulled
export VERSION=0.24.0 # Use the version you pulled
docker run -p 5000:5000 \
-v $(pwd)/tools.yaml:/app/tools.yaml \
us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
@@ -938,14 +938,14 @@ Toolbox should have access to. Most tools will have at least one source to
execute against.

```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
kind: sources
name: my-pg-source
type: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
```

For more details on configuring different types of sources, see the
@@ -954,19 +954,19 @@ For more details on configuring different types of sources, see the
### Tools

The `tools` section of a `tools.yaml` defines the actions an agent can take: what
kind of tool it is, which source(s) it affects, what parameters it uses, etc.
type of tool it is, which source(s) it affects, what parameters it uses, etc.

```yaml
tools:
search-hotels-by-name:
kind: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
kind: tools
name: search-hotels-by-name
type: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```

For more details on configuring different types of tools, see the
@@ -1035,12 +1035,12 @@ The version will be incremented as follows:

### Post-1.0.0 Versioning

Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:
Once the project reaches a stable `1.0.0` release, the version number
**`MAJOR.MINOR.PATCH`** will follow the more common convention:

- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
- **`MAJOR`**: Incremented for incompatible API changes.
- **`MINOR`**: Incremented for new, backward-compatible functionality.
- **`PATCH`**: Incremented for backward-compatible bug fixes.

The public API that this applies to is the CLI associated with Toolbox, the
interactions with official SDKs, and the definitions in the `tools.yaml` file.

200 cmd/root.go

@@ -15,6 +15,7 @@
package cmd

import (
"bytes"
"context"
_ "embed"
"fmt"
@@ -33,6 +34,7 @@ import (
"github.com/fsnotify/fsnotify"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/prompts"
@@ -91,6 +93,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
@@ -197,6 +200,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
@@ -213,6 +217,8 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
@@ -261,6 +267,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
@@ -376,7 +383,9 @@ func NewCommand(opts ...Option) *Command {
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
// TODO: Insecure by default. Might consider updating this for v1.0.0
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")

// wrap RunE command so that we have access to original Command object
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
@@ -385,12 +394,12 @@ func NewCommand(opts ...Option) *Command {
}

type ToolsFile struct {
Sources server.SourceConfigs `yaml:"sources"`
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
Sources server.SourceConfigs `yaml:"sources"`
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
}

// parseEnv replaces environment variables ${ENV_NAME} with their values.
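
The next hunk adds `convertToolsFile` and `ProcessValue`, which rewrite a v1 tools file into the v2 multi-document form that the new `ToolsFile` struct expects. As a rough sketch of that transformation (mirroring the `my-pg-source` example that appears in the README diff later in this compare, not taken from the converter's tests):

```yaml
# v1 form: resources keyed by name, each carrying a `kind` field.
sources:
  my-pg-source:
    kind: postgres
    host: 127.0.0.1
---
# Equivalent v2 document emitted by the converter: `kind` now names the
# resource group, `name` carries the former key, and the old `kind` becomes `type`.
kind: sources
name: my-pg-source
type: postgres
host: 127.0.0.1
```
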
@@ -416,6 +425,106 @@ func parseEnv(input string) (string, error) {
return output, err
}

func convertToolsFile(ctx context.Context, raw []byte) ([]byte, error) {
var input yaml.MapSlice
decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
if err := decoder.Decode(&input); err != nil {
return nil, err
}

// Convert raw MapSlice to a helper map for quick lookup
// while keeping the values as MapSlices to preserve internal order
resourceOrder := []string{}
lookup := make(map[string]yaml.MapSlice)
for _, item := range input {
key, ok := item.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
}
if slice, ok := item.Value.(yaml.MapSlice); ok {
// convert authSources to authServices
if key == "authSources" {
key = "authServices"
}
// works even if lookup[key] is nil
lookup[key] = append(lookup[key], slice...)
// preserving the resource's order of original toolsFile
if !slices.Contains(resourceOrder, key) {
resourceOrder = append(resourceOrder, key)
}
} else {
// toolsfile is already v2
if key == "kind" {
return raw, nil
}
return nil, fmt.Errorf("'%s' is not a map", key)
}
}
// convert to tools file v2
var buf bytes.Buffer
encoder := yaml.NewEncoder(&buf)
for _, kind := range resourceOrder {
data, exists := lookup[kind]
if !exists {
// if this is skipped for all keys, the tools file is in v2
continue
}
// Transform each entry
for _, entry := range data {
entryName, ok := entry.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
}
entryBody := ProcessValue(entry.Value, kind == "toolsets")

transformed := yaml.MapSlice{
{Key: "kind", Value: kind},
{Key: "name", Value: entryName},
}

// Merge the transformed body into our result
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
transformed = append(transformed, bodySlice...)
} else {
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
}

if err := encoder.Encode(transformed); err != nil {
return nil, err
}
}
}
return buf.Bytes(), nil
}

// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
func ProcessValue(v any, isToolset bool) any {
switch val := v.(type) {
case yaml.MapSlice:
for i := range val {
// Perform renaming
if val[i].Key == "kind" {
val[i].Key = "type"
}
// Recursive call for nested values (e.g., nested objects or lists)
val[i].Value = ProcessValue(val[i].Value, false)
}
return val
case []any:
// Process lists: If it's a toolset top-level list, wrap it.
if isToolset {
return yaml.MapSlice{{Key: "tools", Value: val}}
}
// Otherwise, recurse into list items (to catch nested objects)
for i := range val {
val[i] = ProcessValue(val[i], false)
}
return val
default:
return val
}
}

// parseToolsFile parses the provided yaml into appropriate configs.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
var toolsFile ToolsFile
@@ -426,8 +535,13 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
}
raw = []byte(output)

raw, err = convertToolsFile(ctx, raw)
if err != nil {
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
}

// Parse contents
err = yaml.UnmarshalContext(ctx, raw, &toolsFile, yaml.Strict())
toolsFile.Sources, toolsFile.AuthServices, toolsFile.EmbeddingModels, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
if err != nil {
return toolsFile, err
}
@@ -439,11 +553,12 @@
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
merged := ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
}

var conflicts []string
@@ -458,18 +573,6 @@
}
}

// Check for conflicts and merge authSources (deprecated, but still supported)
for name, authSource := range file.AuthSources {
if _, exists := merged.AuthSources[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
} else {
if merged.AuthSources == nil {
merged.AuthSources = make(server.AuthServiceConfigs)
}
merged.AuthSources[name] = authSource
}
}

// Check for conflicts and merge authServices
for name, authService := range file.AuthServices {
if _, exists := merged.AuthServices[name]; exists {
@@ -479,6 +582,15 @@
}
}

// Check for conflicts and merge embeddingModels
for name, em := range file.EmbeddingModels {
if _, exists := merged.EmbeddingModels[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
} else {
merged.EmbeddingModels[name] = em
}
}

// Check for conflicts and merge tools
for name, tool := range file.Tools {
if _, exists := merged.Tools[name]; exists {
@@ -583,14 +695,14 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
panic(err)
}

sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
if err != nil {
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return err
}

s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)

return nil
}
@@ -598,7 +710,7 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
// validateReloadEdits checks that the reloaded tools file configs can be initialized without failing
func validateReloadEdits(
ctx context.Context, toolsFile ToolsFile,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) {
logger, err := util.LoggerFromContext(ctx)
if err != nil {
@@ -616,22 +728,23 @@ func validateReloadEdits(
defer span.End()

reloadedConfig := server.ServerConfig{
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
EmbeddingModelConfigs: toolsFile.EmbeddingModels,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
}

sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
if err != nil {
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return nil, nil, nil, nil, nil, nil, err
return nil, nil, nil, nil, nil, nil, nil, err
}

return sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
}

// watchChanges checks for changes in the provided yaml tools file(s) or folder.
@@ -930,24 +1043,11 @@ func run(cmd *Command) error {

cmd.cfg.SourceConfigs = finalToolsFile.Sources
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
cmd.cfg.ToolConfigs = finalToolsFile.Tools
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
cmd.cfg.PromptConfigs = finalToolsFile.Prompts

authSourceConfigs := finalToolsFile.AuthSources
if authSourceConfigs != nil {
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")

for k, v := range authSourceConfigs {
if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists {
errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
cmd.cfg.AuthServiceConfigs[k] = v
}
}

instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
if err != nil {
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)

998 cmd/root_test.go (file diff suppressed because it is too large)

0 cmd/test.db (new, empty file)

@@ -1 +1 @@
0.24.0
0.25.0

@@ -68,6 +68,7 @@ The BigQuery MCP server is configured using environment variables.
export BIGQUERY_PROJECT="<your-gcp-project-id>"
export BIGQUERY_LOCATION="<your-dataset-location>" # Optional
export BIGQUERY_USE_CLIENT_OAUTH="true" # Optional
export BIGQUERY_SCOPES="<comma-separated-scopes>" # Optional
```

Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):

@@ -45,7 +45,7 @@ most popular issues, so make sure to +1 ones you are the most interested in.
## Can Toolbox be used for non-database tools?

**Yes!** While Toolbox is primarily focused on databases, it also supports generic
**HTTP tools** (`kind: http`). These allow you to connect your agents to REST APIs
**HTTP tools** (`type: http`). These allow you to connect your agents to REST APIs
and other web services, enabling workflows that extend beyond database interactions.

For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md).

18 docs/en/blogs/_index.md (new file)

@@ -0,0 +1,18 @@
---
title: "Featured Articles"
weight: 3
description: Toolbox Medium Blogs
manualLink: "https://medium.com/@mcp_toolbox"
manualLinkTarget: _blank
---

<html>
<head>
<title>Redirecting to Featured Articles</title>
<link rel="canonical" href="https://medium.com/@mcp_toolbox"/>
<meta http-equiv="refresh" content="0;url=https://medium.com/@mcp_toolbox"/>
</head>
<body>
<p>If you are not automatically redirected, please <a href="https://medium.com/@mcp_toolbox">follow this link to our articles</a>.</p>
</body>
</html>
@@ -64,7 +64,7 @@ The structured logging outputs log as JSON:
"timestamp":"2024-11-04T16:45:11.987299-08:00",
"severity":"ERROR",
"logging.googleapis.com/sourceLocation":{...},
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid kind of data source"
"message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid type of data source"
}
```

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.24.0\" # x-release-please-version\n",
"version = \"0.25.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",
@@ -300,78 +300,89 @@
"# You can also upload a tools file and use that to run toolbox.\n",
"tools_file_name = \"tools.yml\"\n",
"file_content = f\"\"\"\n",
"sources:\n",
"  my-pg-source:\n",
"    kind: postgres\n",
"    host: 127.0.0.1\n",
"    port: 5432\n",
"    database: toolbox_db\n",
"    user: toolbox_user\n",
"    password: my-password\n",
"kind: sources\n",
"name: my-pg-source\n",
"type: postgres\n",
"host: 127.0.0.1\n",
"port: 5432\n",
"database: toolbox_db\n",
"user: toolbox_user\n",
"password: my-password\n",
"---\n",
"kind: tools\n",
"name: search-hotels-by-name\n",
"type: postgres-sql\n",
"source: my-pg-source\n",
"description: Search for hotels based on name.\n",
"parameters:\n",
"  - name: name\n",
"    type: string\n",
"    description: The name of the hotel.\n",
"statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n",
"---\n",
"kind: tools\n",
"name: search-hotels-by-location\n",
"type: postgres-sql\n",
"source: my-pg-source\n",
"description: Search for hotels based on location.\n",
"parameters:\n",
"  - name: location\n",
"    type: string\n",
"    description: The location of the hotel.\n",
"statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n",
"---\n",
"kind: tools\n",
"name: book-hotel\n",
"type: postgres-sql\n",
"source: my-pg-source\n",
"description: >-\n",
"  Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n",
"parameters:\n",
"  - name: hotel_id\n",
"    type: string\n",
"    description: The ID of the hotel to book.\n",
"statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n",
"---\n",
"kind: tools\n",
"name: update-hotel\n",
"type: postgres-sql\n",
"source: my-pg-source\n",
"description: >-\n",
"  Update a hotel's check-in and check-out dates by its ID. Returns a message\n",
"  indicating whether the hotel was successfully updated or not.\n",
"parameters:\n",
"  - name: hotel_id\n",
"    type: string\n",
"    description: The ID of the hotel to update.\n",
"  - name: checkin_date\n",
"    type: string\n",
"    description: The new check-in date of the hotel.\n",
"  - name: checkout_date\n",
"    type: string\n",
"    description: The new check-out date of the hotel.\n",
"statement: >-\n",
"  UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n",
"  as date) WHERE id = \\$1;\n",
"---\n",
"kind: tools\n",
"name: cancel-hotel\n",
"type: postgres-sql\n",
"source: my-pg-source\n",
"description: Cancel a hotel by its ID.\n",
"parameters:\n",
"  - name: hotel_id\n",
"    type: string\n",
"    description: The ID of the hotel to cancel.\n",
"statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n",
"---\n",
"kind: toolsets\n",
"name: my-toolset\n",
"tools:\n",
"  search-hotels-by-name:\n",
"    kind: postgres-sql\n",
"    source: my-pg-source\n",
"    description: Search for hotels based on name.\n",
"    parameters:\n",
"      - name: name\n",
"        type: string\n",
"        description: The name of the hotel.\n",
"    statement: SELECT * FROM hotels WHERE name ILIKE '%' || \\$1 || '%';\n",
"  search-hotels-by-location:\n",
"    kind: postgres-sql\n",
"    source: my-pg-source\n",
"    description: Search for hotels based on location.\n",
"    parameters:\n",
"      - name: location\n",
"        type: string\n",
"        description: The location of the hotel.\n",
"    statement: SELECT * FROM hotels WHERE location ILIKE '%' || \\$1 || '%';\n",
"  book-hotel:\n",
"    kind: postgres-sql\n",
"    source: my-pg-source\n",
"    description: >-\n",
"      Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.\n",
"    parameters:\n",
"      - name: hotel_id\n",
"        type: string\n",
"        description: The ID of the hotel to book.\n",
"    statement: UPDATE hotels SET booked = B'1' WHERE id = \\$1;\n",
"  update-hotel:\n",
"    kind: postgres-sql\n",
"    source: my-pg-source\n",
"    description: >-\n",
"      Update a hotel's check-in and check-out dates by its ID. Returns a message\n",
"      indicating whether the hotel was successfully updated or not.\n",
"    parameters:\n",
"      - name: hotel_id\n",
"        type: string\n",
"        description: The ID of the hotel to update.\n",
"      - name: checkin_date\n",
"        type: string\n",
"        description: The new check-in date of the hotel.\n",
"      - name: checkout_date\n",
"        type: string\n",
"        description: The new check-out date of the hotel.\n",
"    statement: >-\n",
"      UPDATE hotels SET checkin_date = CAST(\\$2 as date), checkout_date = CAST(\\$3\n",
"      as date) WHERE id = \\$1;\n",
"  cancel-hotel:\n",
"    kind: postgres-sql\n",
"    source: my-pg-source\n",
"    description: Cancel a hotel by its ID.\n",
"    parameters:\n",
"      - name: hotel_id\n",
"        type: string\n",
"        description: The ID of the hotel to cancel.\n",
"    statement: UPDATE hotels SET booked = B'0' WHERE id = \\$1;\n",
"toolsets:\n",
"  my-toolset:\n",
"    - search-hotels-by-name\n",
"    - search-hotels-by-location\n",
"    - book-hotel\n",
"    - update-hotel\n",
"    - cancel-hotel\n",
"  - search-hotels-by-name\n",
"  - search-hotels-by-location\n",
"  - book-hotel\n",
"  - update-hotel\n",
"  - cancel-hotel\n",
"\"\"\""
]
},

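A short follow-up sketch for the notebook cell above: write `file_content` to `tools_file_name` and start the server. Both names come from the cell itself; the `--tools-file` flag is an assumption about the Toolbox CLI rather than something this diff adds.

```python
# Persist the generated config and launch the toolbox binary downloaded earlier.
import pathlib
import subprocess

pathlib.Path(tools_file_name).write_text(file_content)  # variables from the cell above
server = subprocess.Popen(["./toolbox", "--tools-file", tools_file_name])  # assumed flag
print("toolbox started, pid", server.pid)
```
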
@@ -36,14 +36,14 @@ Toolbox should have access to. Most tools will have at least one source to
execute against.

```yaml
sources:
  my-pg-source:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: ${USER_NAME}
    password: ${PASSWORD}
kind: sources
name: my-pg-source
type: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: ${USER_NAME}
password: ${PASSWORD}
```

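The switch from one nested map to a stream of `kind`-tagged documents means a config file can now hold several YAML documents separated by `---`. A minimal sketch (not part of this change) of how such a file can be inspected with PyYAML; the `tools.yaml` path is just an example:

```python
# Group the multi-document config by its `kind` field.
import collections

import yaml  # PyYAML

with open("tools.yaml") as f:
    docs = [d for d in yaml.safe_load_all(f) if d]

by_kind = collections.defaultdict(list)
for doc in docs:
    by_kind[doc.get("kind", "unknown")].append(doc.get("name"))

for kind, names in sorted(by_kind.items()):
    print(f"{kind}: {names}")
```
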
For more details on configuring different types of sources, see the
@@ -52,20 +52,20 @@ For more details on configuring different types of sources, see the
### Tools

The `tools` section of your `tools.yaml` defines the actions your agent can
take: what kind of tool it is, which source(s) it affects, what parameters it
take: what type of tool it is, which source(s) it affects, what parameters it
uses, etc.

```yaml
tools:
  search-hotels-by-name:
    kind: postgres-sql
    source: my-pg-source
    description: Search for hotels based on name.
    parameters:
      - name: name
        type: string
        description: The name of the hotel.
    statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
kind: tools
name: search-hotels-by-name
type: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
  - name: name
    type: string
    description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```

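For illustration only: once a Toolbox server is running with this file, a tool defined this way can be invoked over HTTP. The endpoint path and port below are assumptions about a default local server, not something this change introduces.

```python
# Hypothetical invocation of the search-hotels-by-name tool defined above.
import json
import urllib.request

url = "http://127.0.0.1:5000/api/tool/search-hotels-by-name/invoke"  # assumed local address
payload = json.dumps({"name": "Hilton"}).encode("utf-8")
req = urllib.request.Request(url, data=payload, headers={"Content-Type": "application/json"})
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))
```
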
For more details on configuring different types of tools, see the
@@ -78,13 +78,17 @@ that you want to be able to load together. This can be useful for defining
different sets for different agents or different applications.

```yaml
toolsets:
  my_first_toolset:
    - my_first_tool
    - my_second_tool
  my_second_toolset:
    - my_second_tool
    - my_third_tool
kind: toolsets
name: my_first_toolset
tools:
  - my_first_tool
  - my_second_tool
---
kind: toolsets
name: my_second_toolset
tools:
  - my_second_tool
  - my_third_tool
```

You can load toolsets by name:
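
As a hedged sketch, using the `toolbox-core` Python SDK; the client class, the `load_toolset` method, and the server address are assumptions about that SDK and a local server rather than something defined in this file.

```python
# Load one named toolset from a running Toolbox server (sketch).
import asyncio

from toolbox_core import ToolboxClient  # assumed SDK


async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as client:  # assumed local address
        tools = await client.load_toolset("my_first_toolset")
        for tool in tools:
            print(tool)


asyncio.run(main())
```
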
@@ -103,14 +107,14 @@ The `prompts` section of your `tools.yaml` defines the templates containing
structured messages and instructions for interacting with language models.

```yaml
prompts:
  code_review:
    description: "Asks the LLM to analyze code quality and suggest improvements."
    messages:
      - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
    arguments:
      - name: "code"
        description: "The code to review"
kind: prompts
name: code_review
description: "Asks the LLM to analyze code quality and suggest improvements."
messages:
  - content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
arguments:
  - name: "code"
    description: "The code to review"
```

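Prompts are served to MCP clients; as a rough sketch, a client fetching `code_review` would send a `prompts/get` request shaped like the one below. The method name follows the MCP spec, the transport is omitted, and the code snippet is illustrative.

```python
# Build the JSON-RPC payload an MCP client would send for this prompt.
import json

request = {
    "jsonrpc": "2.0",
    "id": 1,
    "method": "prompts/get",
    "params": {
        "name": "code_review",
        "arguments": {"code": "def add(a, b):\n    return a + b"},  # illustrative snippet
    },
}
print(json.dumps(request, indent=2))
```
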
For more details on configuring different types of prompts, see the

@@ -103,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):

```sh
# see releases page for other versions
export VERSION=0.24.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -114,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

```sh
# see releases page for other versions
export VERSION=0.24.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```
@@ -125,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):

```sh
# see releases page for other versions
export VERSION=0.24.0
export VERSION=0.25.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```
@@ -136,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):

```cmd
:: see releases page for other versions
set VERSION=0.24.0
set VERSION=0.25.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```

@@ -146,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):

```powershell
# see releases page for other versions
$VERSION = "0.24.0"
$VERSION = "0.25.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```

@@ -158,7 +158,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.24.0
export VERSION=0.25.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -177,7 +177,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.24.0
go install github.com/googleapis/genai-toolbox@v0.25.0
```

{{% /tab %}}

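The download URLs above follow one pattern: version, then platform, then the binary name. A small stdlib-only sketch that mirrors the curl commands; the URL layout is taken from this section, everything else is illustrative.

```python
# Compose the release URL and fetch the binary, mirroring the curl commands above.
import urllib.request

VERSION = "0.25.0"
PLATFORM = "linux/amd64"  # or darwin/arm64, darwin/amd64, windows/amd64

url = f"https://storage.googleapis.com/genai-toolbox/v{VERSION}/{PLATFORM}/toolbox"
urllib.request.urlretrieve(url, "toolbox")
print("downloaded", url)
```
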
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -125,78 +125,89 @@ In this section, we will download Toolbox, configure our tools in a
{{< /notice >}}

```yaml
sources:
  my-pg-source:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: toolbox_user
    password: my-password
kind: sources
name: my-pg-source
type: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
---
kind: tools
name: search-hotels-by-name
type: postgres-sql
source: my-pg-source
description: Search for hotels based on name.
parameters:
  - name: name
    type: string
    description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
---
kind: tools
name: search-hotels-by-location
type: postgres-sql
source: my-pg-source
description: Search for hotels based on location.
parameters:
  - name: location
    type: string
    description: The location of the hotel.
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
---
kind: tools
name: book-hotel
type: postgres-sql
source: my-pg-source
description: >-
  Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
parameters:
  - name: hotel_id
    type: string
    description: The ID of the hotel to book.
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
---
kind: tools
name: update-hotel
type: postgres-sql
source: my-pg-source
description: >-
  Update a hotel's check-in and check-out dates by its ID. Returns a message
  indicating whether the hotel was successfully updated or not.
parameters:
  - name: hotel_id
    type: string
    description: The ID of the hotel to update.
  - name: checkin_date
    type: string
    description: The new check-in date of the hotel.
  - name: checkout_date
    type: string
    description: The new check-out date of the hotel.
statement: >-
  UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
  as date) WHERE id = $1;
---
kind: tools
name: cancel-hotel
type: postgres-sql
source: my-pg-source
description: Cancel a hotel by its ID.
parameters:
  - name: hotel_id
    type: string
    description: The ID of the hotel to cancel.
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
---
kind: toolsets
name: my-toolset
tools:
  search-hotels-by-name:
    kind: postgres-sql
    source: my-pg-source
    description: Search for hotels based on name.
    parameters:
      - name: name
        type: string
        description: The name of the hotel.
    statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
  search-hotels-by-location:
    kind: postgres-sql
    source: my-pg-source
    description: Search for hotels based on location.
    parameters:
      - name: location
        type: string
        description: The location of the hotel.
    statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
  book-hotel:
    kind: postgres-sql
    source: my-pg-source
    description: >-
      Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
    parameters:
      - name: hotel_id
        type: string
        description: The ID of the hotel to book.
    statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
  update-hotel:
    kind: postgres-sql
    source: my-pg-source
    description: >-
      Update a hotel's check-in and check-out dates by its ID. Returns a message
      indicating whether the hotel was successfully updated or not.
    parameters:
      - name: hotel_id
        type: string
        description: The ID of the hotel to update.
      - name: checkin_date
        type: string
        description: The new check-in date of the hotel.
      - name: checkout_date
        type: string
        description: The new check-out date of the hotel.
    statement: >-
      UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
      as date) WHERE id = $1;
  cancel-hotel:
    kind: postgres-sql
    source: my-pg-source
    description: Cancel a hotel by its ID.
    parameters:
      - name: hotel_id
        type: string
        description: The ID of the hotel to cancel.
    statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
toolsets:
  my-toolset:
    - search-hotels-by-name
    - search-hotels-by-location
    - book-hotel
    - update-hotel
    - cancel-hotel
  - search-hotels-by-name
  - search-hotels-by-location
  - book-hotel
  - update-hotel
  - cancel-hotel
```

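As a quick sanity check (illustrative only), a running server can be asked for the toolset defined above; the `/api/toolset/<name>` path and port are assumptions about a default local server, not part of this change.

```python
# Fetch the manifest for my-toolset from a locally running Toolbox server (sketch).
import json
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:5000/api/toolset/my-toolset") as resp:
    manifest = json.loads(resp.read())

print(sorted(manifest.get("tools", {})))  # expect the five hotel tools listed above
```
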
For more info on tools, check out the

@@ -157,61 +157,67 @@ Create a file named `tools.yaml`. This file defines the database connection, the
SQL tools available, and the prompts the agents will use.

```yaml
sources:
  my-foodiefind-db:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: toolbox_user
    password: my-password
tools:
  find_user_by_email:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a user's ID by their email address.
    parameters:
      - name: email
        type: string
        description: The email address of the user to find.
    statement: SELECT id FROM users WHERE email = $1;
  find_restaurant_by_name:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a restaurant's ID by its exact name.
    parameters:
      - name: name
        type: string
        description: The name of the restaurant to find.
    statement: SELECT id FROM restaurants WHERE name = $1;
  find_review_by_user_and_restaurant:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find the full record for a specific review using the user's ID and the restaurant's ID.
    parameters:
      - name: user_id
        type: integer
        description: The numerical ID of the user.
      - name: restaurant_id
        type: integer
        description: The numerical ID of the restaurant.
    statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
prompts:
  investigate_missing_review:
    description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
    arguments:
      - name: "user_email"
        description: "The email of the user who wrote the review."
      - name: "restaurant_name"
        description: "The name of the restaurant being reviewed."
    messages:
      - content: >-
          **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
          **Workflow:**
          1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
          2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
          3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
          4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
kind: sources
name: my-foodiefind-db
type: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
---
kind: tools
name: find_user_by_email
type: postgres-sql
source: my-foodiefind-db
description: Find a user's ID by their email address.
parameters:
  - name: email
    type: string
    description: The email address of the user to find.
statement: SELECT id FROM users WHERE email = $1;
---
kind: tools
name: find_restaurant_by_name
type: postgres-sql
source: my-foodiefind-db
description: Find a restaurant's ID by its exact name.
parameters:
  - name: name
    type: string
    description: The name of the restaurant to find.
statement: SELECT id FROM restaurants WHERE name = $1;
---
kind: tools
name: find_review_by_user_and_restaurant
type: postgres-sql
source: my-foodiefind-db
description: Find the full record for a specific review using the user's ID and the restaurant's ID.
parameters:
  - name: user_id
    type: integer
    description: The numerical ID of the user.
  - name: restaurant_id
    type: integer
    description: The numerical ID of the restaurant.
statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
---
kind: prompts
name: investigate_missing_review
description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
arguments:
  - name: "user_email"
    description: "The email of the user who wrote the review."
  - name: "restaurant_name"
    description: "The name of the restaurant being reviewed."
messages:
  - content: >-
      **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
      **Workflow:**
      1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
      2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
      3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
      4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
```

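The `investigate_missing_review` prompt walks an agent through three tool calls. The sketch below drives the same workflow by hand; the invoke endpoint, port, and the sample email and restaurant name are assumptions, not part of this configuration.

```python
# Manually replay steps 1-3 of the investigate_missing_review workflow (sketch).
import json
import urllib.request

BASE = "http://127.0.0.1:5000/api/tool"  # assumed local Toolbox server


def invoke(tool, params):
    req = urllib.request.Request(
        f"{BASE}/{tool}/invoke",
        data=json.dumps(params).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())


user = invoke("find_user_by_email", {"email": "reviewer@example.com"})      # step 1
restaurant = invoke("find_restaurant_by_name", {"name": "Example Bistro"})  # step 2
print(user, restaurant)
# Step 3 feeds the returned ids into find_review_by_user_and_restaurant; the exact
# response shape depends on the server, so it is not hard-coded here.
```
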
## Step 3: Connect to Gemini CLI

@@ -28,11 +28,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

@@ -88,18 +88,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
|
||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
|
||||
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
|
||||
@@ -39,11 +39,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genai v1.34.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect

@@ -123,18 +123,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
|
||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
|
||||
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
|
||||
@@ -26,11 +26,11 @@ require (
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

@@ -94,18 +94,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
|
||||
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
|
||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
|
||||
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
||||
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"arrify": "^2.0.0",
|
||||
"extend": "^3.0.2"
|
||||
@@ -32,7 +31,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
|
||||
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
@@ -42,7 +40,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
|
||||
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
@@ -52,7 +49,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
|
||||
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@google-cloud/paginator": "^5.0.0",
|
||||
"@google-cloud/projectify": "^4.0.0",
|
||||
@@ -79,7 +75,6 @@
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
@@ -102,6 +97,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
|
||||
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"google-auth-library": "^9.14.2",
|
||||
"ws": "^8.18.0"
|
||||
@@ -140,6 +136,7 @@
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
|
||||
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ajv": "^6.12.6",
|
||||
"content-type": "^1.0.5",
|
||||
@@ -302,7 +299,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
|
||||
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
@@ -311,15 +307,13 @@
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
|
||||
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "24.10.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
|
||||
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~7.16.0"
|
||||
}
|
||||
@@ -329,7 +323,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
|
||||
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/caseless": "*",
|
||||
"@types/node": "*",
|
||||
@@ -342,7 +335,6 @@
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
|
||||
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
@@ -360,7 +352,6 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
@@ -370,7 +361,6 @@
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
@@ -382,15 +372,13 @@
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
|
||||
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"event-target-shim": "^5.0.0"
|
||||
},
|
||||
@@ -465,7 +453,6 @@
|
||||
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
|
||||
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
@@ -475,7 +462,6 @@
|
||||
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
|
||||
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"retry": "0.13.1"
|
||||
}
|
||||
@@ -768,7 +754,6 @@
|
||||
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
|
||||
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"end-of-stream": "^1.4.1",
|
||||
"inherits": "^2.0.3",
|
||||
@@ -817,7 +802,6 @@
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
|
||||
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
@@ -887,7 +871,6 @@
|
||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
@@ -918,6 +901,7 @@
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
|
||||
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"accepts": "^2.0.0",
|
||||
"body-parser": "^2.2.0",
|
||||
@@ -999,7 +983,6 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"strnum": "^1.1.1"
|
||||
},
|
||||
@@ -1350,8 +1333,7 @@
|
||||
"url": "https://patreon.com/mdevils"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/http-errors": {
|
||||
"version": "2.0.0",
|
||||
@@ -1383,7 +1365,6 @@
|
||||
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
|
||||
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@tootallnate/once": "2",
|
||||
"agent-base": "6",
|
||||
@@ -1398,7 +1379,6 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -1525,12 +1505,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
||||
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"jwa": "^2.0.0",
|
||||
"jwa": "^2.0.1",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
@@ -1575,7 +1555,6 @@
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
|
||||
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
@@ -1736,7 +1715,6 @@
|
||||
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
|
||||
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"yocto-queue": "^0.1.0"
|
||||
},
|
||||
@@ -1835,9 +1813,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/qs": {
|
||||
"version": "6.14.0",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
|
||||
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
|
||||
"version": "6.14.1",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
|
||||
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"side-channel": "^1.1.0"
|
||||
@@ -1878,7 +1856,6 @@
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
@@ -1893,7 +1870,6 @@
|
||||
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
|
||||
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
}
|
||||
@@ -1903,7 +1879,6 @@
|
||||
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
|
||||
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/request": "^2.48.8",
|
||||
"extend": "^3.0.2",
|
||||
@@ -2132,7 +2107,6 @@
|
||||
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
|
||||
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"stubs": "^3.0.0"
|
||||
}
|
||||
@@ -2141,15 +2115,13 @@
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
|
||||
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
@@ -2260,22 +2232,19 @@
|
||||
"url": "https://github.com/sponsors/NaturalIntelligence"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/stubs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
|
||||
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/teeny-request": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
|
||||
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"http-proxy-agent": "^5.0.0",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
@@ -2292,7 +2261,6 @@
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"debug": "4"
|
||||
},
|
||||
@@ -2305,7 +2273,6 @@
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"agent-base": "6",
|
||||
"debug": "4"
|
||||
@@ -2347,8 +2314,7 @@
|
||||
"version": "7.16.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
||||
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/unpipe": {
|
||||
"version": "1.0.0",
|
||||
@@ -2372,8 +2338,7 @@
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "9.0.1",
|
||||
@@ -2560,7 +2525,6 @@
|
||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@@ -2573,6 +2537,7 @@
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
||||
@@ -569,11 +569,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
||||
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"jwa": "^2.0.0",
|
||||
"jwa": "^2.0.1",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -3376,22 +3376,23 @@
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser": {
|
||||
"version": "1.20.3",
|
||||
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
|
||||
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
|
||||
"version": "1.20.4",
|
||||
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
|
||||
"integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"bytes": "~3.1.2",
|
||||
"content-type": "~1.0.5",
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"destroy": "1.2.0",
|
||||
"http-errors": "2.0.0",
|
||||
"iconv-lite": "0.4.24",
|
||||
"on-finished": "2.4.1",
|
||||
"qs": "6.13.0",
|
||||
"raw-body": "2.5.2",
|
||||
"destroy": "~1.2.0",
|
||||
"http-errors": "~2.0.1",
|
||||
"iconv-lite": "~0.4.24",
|
||||
"on-finished": "~2.4.1",
|
||||
"qs": "~6.14.0",
|
||||
"raw-body": "~2.5.3",
|
||||
"type-is": "~1.6.18",
|
||||
"unpipe": "1.0.0"
|
||||
"unpipe": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8",
|
||||
@@ -3406,11 +3407,40 @@
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser/node_modules/http-errors": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
|
||||
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"depd": "~2.0.0",
|
||||
"inherits": "~2.0.4",
|
||||
"setprototypeof": "~1.2.0",
|
||||
"statuses": "~2.0.2",
|
||||
"toidentifier": "~1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/body-parser/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
|
||||
},
|
||||
"node_modules/body-parser/node_modules/statuses": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
|
||||
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-equal-constant-time": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
@@ -3434,6 +3464,7 @@
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
@@ -3830,38 +3861,39 @@
|
||||
}
|
||||
},
|
||||
"node_modules/express": {
|
||||
"version": "4.21.2",
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
|
||||
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
|
||||
"version": "4.22.1",
|
||||
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
|
||||
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"accepts": "~1.3.8",
|
||||
"array-flatten": "1.1.1",
|
||||
"body-parser": "1.20.3",
|
||||
"content-disposition": "0.5.4",
|
||||
"body-parser": "~1.20.3",
|
||||
"content-disposition": "~0.5.4",
|
||||
"content-type": "~1.0.4",
|
||||
"cookie": "0.7.1",
|
||||
"cookie-signature": "1.0.6",
|
||||
"cookie": "~0.7.1",
|
||||
"cookie-signature": "~1.0.6",
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"encodeurl": "~2.0.0",
|
||||
"escape-html": "~1.0.3",
|
||||
"etag": "~1.8.1",
|
||||
"finalhandler": "1.3.1",
|
||||
"fresh": "0.5.2",
|
||||
"http-errors": "2.0.0",
|
||||
"finalhandler": "~1.3.1",
|
||||
"fresh": "~0.5.2",
|
||||
"http-errors": "~2.0.0",
|
||||
"merge-descriptors": "1.0.3",
|
||||
"methods": "~1.1.2",
|
||||
"on-finished": "2.4.1",
|
||||
"on-finished": "~2.4.1",
|
||||
"parseurl": "~1.3.3",
|
||||
"path-to-regexp": "0.1.12",
|
||||
"path-to-regexp": "~0.1.12",
|
||||
"proxy-addr": "~2.0.7",
|
||||
"qs": "6.13.0",
|
||||
"qs": "~6.14.0",
|
||||
"range-parser": "~1.2.1",
|
||||
"safe-buffer": "5.2.1",
|
||||
"send": "0.19.0",
|
||||
"serve-static": "1.16.2",
|
||||
"send": "~0.19.0",
|
||||
"serve-static": "~1.16.2",
|
||||
"setprototypeof": "1.2.0",
|
||||
"statuses": "2.0.1",
|
||||
"statuses": "~2.0.1",
|
||||
"type-is": "~1.6.18",
|
||||
"utils-merge": "1.0.1",
|
||||
"vary": "~1.1.2"
|
||||
@@ -4904,6 +4936,7 @@
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": ">= 2.1.2 < 3"
|
||||
},
|
||||
@@ -5661,11 +5694,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/qs": {
|
||||
"version": "6.13.0",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
|
||||
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
|
||||
"version": "6.14.1",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
|
||||
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"side-channel": "^1.0.6"
|
||||
"side-channel": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.6"
|
||||
@@ -5683,19 +5717,49 @@
|
||||
}
|
||||
},
|
||||
"node_modules/raw-body": {
|
||||
"version": "2.5.2",
|
||||
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
|
||||
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
|
||||
"version": "2.5.3",
|
||||
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
|
||||
"integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"http-errors": "2.0.0",
|
||||
"iconv-lite": "0.4.24",
|
||||
"unpipe": "1.0.0"
|
||||
"bytes": "~3.1.2",
|
||||
"http-errors": "~2.0.1",
|
||||
"iconv-lite": "~0.4.24",
|
||||
"unpipe": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/raw-body/node_modules/http-errors": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
|
||||
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"depd": "~2.0.0",
|
||||
"inherits": "~2.0.4",
|
||||
"setprototypeof": "~1.2.0",
|
||||
"statuses": "~2.0.2",
|
||||
"toidentifier": "~1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/raw-body/node_modules/statuses": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
|
||||
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/readable-stream": {
|
||||
"version": "3.6.2",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||
@@ -5813,7 +5877,8 @@
|
||||
"node_modules/safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
|
||||
@@ -45,9 +45,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/core": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.0.tgz",
|
||||
"integrity": "sha512-yJ6JHcU9psjnQbzRFkXjIdNTA+3074dA+2pHdH8ewvQCSleSk6JcjkCMIb5+NASjeMoi1ZuntlLKVsNqF38YxA==",
|
||||
"version": "1.1.8",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.8.tgz",
|
||||
"integrity": "sha512-kIUidOgc0ZdyXo4Ahn9Zas+OayqOfk4ZoKPi7XaDipNSWSApc2+QK5BVcjvwtzxstsNOrmXJiJWEN6WPF/MvAw==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
@@ -56,10 +56,9 @@
|
||||
"camelcase": "6",
|
||||
"decamelize": "1.2.0",
|
||||
"js-tiktoken": "^1.0.12",
|
||||
"langsmith": "^0.3.64",
|
||||
"langsmith": ">=0.4.0 <1.0.0",
|
||||
"mustache": "^4.2.0",
|
||||
"p-queue": "^6.6.2",
|
||||
"p-retry": "^7.0.0",
|
||||
"uuid": "^10.0.0",
|
||||
"zod": "^3.25.76 || ^4"
|
||||
},
|
||||
@@ -67,25 +66,10 @@
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/core/node_modules/p-retry": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.0.tgz",
|
||||
"integrity": "sha512-xL4PiFRQa/f9L9ZvR4/gUCRNus4N8YX80ku8kv9Jqz+ZokkiZLM0bcvX0gm1F3PDi9SPRsww1BDsTWgE6Y1GLQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-network-error": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/google-genai": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.0.0.tgz",
|
||||
"integrity": "sha512-PaAWkogQdF+Y2bhhXWXUrC2nO7sTgWLtobBbZl/0V8Aa1F/KG2wrMECie3S17bAdFu/6VmQOuFFrlgSMwQC5KA==",
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.1.3.tgz",
|
||||
"integrity": "sha512-ZdlFK/N10GyU6ATzkM01Sk1rlHBoy36Q/MawGD1SyXdD2lQxZxuQZjFWewj6uzWQ2Nnjj70EvU/kmmHVPn6sfQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
@@ -95,7 +79,7 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@langchain/core": "1.1.0"
|
||||
"@langchain/core": "1.1.8"
|
||||
}
|
||||
},
|
||||
"node_modules/@langchain/google-genai/node_modules/uuid": {
|
||||
@@ -814,18 +798,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-network-error": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.0.tgz",
|
||||
"integrity": "sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
@@ -882,13 +854,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/langchain": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.0.2.tgz",
|
||||
"integrity": "sha512-He/xvjVl8DHESvdaW6Dpyba72OaLCAfS2CyOm1aWrlJ4C38dKXyTIxphtld8hiii6MWX7qMSmu2EaUwWBx2STg==",
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.2.3.tgz",
|
||||
"integrity": "sha512-3k986xJuqg4az53JxV5LnGlOzIXF1d9Kq6Y9s7XjitvzhpsbFuTDV5/kiF4cx3pkNGyw0mUXC4tLz9RxucO0hw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@langchain/langgraph": "^1.0.0",
|
||||
"@langchain/langgraph-checkpoint": "^1.0.0",
|
||||
"langsmith": "~0.3.74",
|
||||
"langsmith": ">=0.4.0 <1.0.0",
|
||||
"uuid": "^10.0.0",
|
||||
"zod": "^3.25.76 || ^4"
|
||||
},
|
||||
@@ -896,19 +869,19 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@langchain/core": "^1.0.0"
|
||||
"@langchain/core": "1.1.8"
|
||||
}
|
||||
},
|
||||
"node_modules/langsmith": {
|
||||
"version": "0.3.77",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.77.tgz",
|
||||
"integrity": "sha512-wbS/9IX/hOAsOEOtPj8kCS8H0tFHaelwQ97gTONRtIfoPPLd9MMUmhk0KQB5DdsGAI5abg966+f0dZ/B+YRRzg==",
|
||||
"version": "0.4.3",
|
||||
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
|
||||
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/uuid": "^10.0.0",
|
||||
"chalk": "^4.1.2",
|
||||
"console-table-printer": "^2.12.1",
|
||||
"p-queue": "^6.6.2",
|
||||
"p-retry": "4",
|
||||
"semver": "^7.6.3",
|
||||
"uuid": "^10.0.0"
|
||||
},
|
||||
|
||||
@@ -882,11 +882,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
|
||||
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"jwa": "^2.0.0",
|
||||
"jwa": "^2.0.1",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
google-adk==1.19.0
|
||||
toolbox-core==0.5.3
|
||||
pytest==9.0.1
|
||||
google-adk==1.21.0
|
||||
toolbox-core==0.5.4
|
||||
pytest==9.0.2
|
||||
@@ -1,3 +1,3 @@
|
||||
google-genai==1.52.0
|
||||
toolbox-core==0.5.3
|
||||
pytest==9.0.1
|
||||
google-genai==1.57.0
|
||||
toolbox-core==0.5.4
|
||||
pytest==9.0.2
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
langchain==1.1.0
|
||||
langchain-google-vertexai==3.1.0
|
||||
langgraph==1.0.4
|
||||
toolbox-langchain==0.5.3
|
||||
pytest==9.0.1
|
||||
langchain==1.2.2
|
||||
langchain-google-vertexai==3.2.1
|
||||
langgraph==1.0.5
|
||||
toolbox-langchain==0.5.4
|
||||
pytest==9.0.2
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
llama-index==0.14.10
|
||||
llama-index-llms-google-genai==0.7.3
|
||||
toolbox-llamaindex==0.5.3
|
||||
pytest==9.0.1
|
||||
llama-index==0.14.12
|
||||
llama-index-llms-google-genai==0.8.3
|
||||
toolbox-llamaindex==0.5.4
|
||||
pytest==9.0.2
|
||||
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -33,78 +33,89 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-pg-source:
|
||||
kind: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
kind: sources
|
||||
name: my-pg-source
|
||||
type: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: toolbox_user
|
||||
password: my-password
|
||||
---
|
||||
kind: tools
|
||||
name: search-hotels-by-name
|
||||
type: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
---
|
||||
kind: tools
|
||||
name: search-hotels-by-location
|
||||
type: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
---
|
||||
kind: tools
|
||||
name: book-hotel
|
||||
type: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
---
|
||||
kind: tools
|
||||
name: update-hotel
|
||||
type: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
---
|
||||
kind: tools
|
||||
name: cancel-hotel
|
||||
type: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
---
|
||||
kind: toolsets
|
||||
name: my-toolset
|
||||
tools:
|
||||
search-hotels-by-name:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
search-hotels-by-location:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
book-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
update-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
cancel-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
toolsets:
|
||||
my-toolset:
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
For more info on tools, check out the `Resources` section of the docs.
|
||||
|
||||
@@ -48,6 +48,7 @@ instance, database and users:
|
||||
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* `roles/cloudsql.admin`: Provides full control over all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
@@ -299,6 +300,7 @@ instances and interacting with your database:
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
|
||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -48,6 +48,7 @@ database and users:
|
||||
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* `roles/cloudsql.admin`: Provides full control over all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
@@ -299,6 +300,7 @@ instances and interacting with your database:
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
|
||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -48,6 +48,7 @@ instance, database and users:
|
||||
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* `roles/cloudsql.admin`: Provides full control over all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
* `create_instance`
|
||||
@@ -299,6 +300,7 @@ instances and interacting with your database:
|
||||
* **create_user**: Creates a new user in a Cloud SQL instance.
|
||||
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
|
||||
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
|
||||
* **create_backup**: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
|
||||
|
||||
@@ -18,6 +18,7 @@ to expose your developer assistant tools to a Looker instance:
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Antigravity][antigravity]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[gemini-cli]: #configure-your-mcp-client
|
||||
@@ -27,6 +28,7 @@ to expose your developer assistant tools to a Looker instance:
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[antigravity]: #connect-with-antigravity
|
||||
|
||||
## Set up Looker
|
||||
|
||||
@@ -38,6 +40,55 @@ to expose your developer assistant tools to a Looker instance:
|
||||
listening at a different port, and you will need to use
|
||||
`https://looker.example.com:19999` instead.
|
||||
|
||||
## Connect with Antigravity
|
||||
|
||||
You can connect Looker to Antigravity in the following ways:
|
||||
|
||||
* Using the MCP Store
|
||||
* Using a custom configuration
|
||||
|
||||
{{< notice note >}}
|
||||
You don't need to download the MCP Toolbox binary to use these methods.
|
||||
{{< /notice >}}
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="MCP Store" lang="en" %}}
|
||||
The most straightforward way to connect to Looker in Antigravity is by using the built-in MCP Store.
|
||||
|
||||
1. Open Antigravity and open the editor's agent panel.
|
||||
1. Click the **"..."** icon at the top of the panel and select **MCP Servers**.
|
||||
1. Locate **Looker** in the list of available servers and click Install.
|
||||
1. Follow the on-screen prompts to securely link your accounts where applicable.
|
||||
|
||||
After you install Looker in the MCP Store, resources and tools from the server are automatically available to the editor.
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Custom config" lang="en" %}}
|
||||
To connect to a custom MCP server, follow these steps:
|
||||
|
||||
1. Open Antigravity and navigate to the MCP store using the **"..."** drop-down at the top of the editor's agent panel.
|
||||
1. To open the **mcp_config.json** file, click **MCP Servers** and then click **Manage MCP Servers > View raw config**.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save.
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"looker": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "looker", "--stdio"],
|
||||
"env": {
|
||||
"LOOKER_BASE_URL": "https://looker.example.com",
|
||||
"LOOKER_CLIENT_ID": "your-client-id",
|
||||
"LOOKER_CLIENT_SECRET": "your-client-secret"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct
|
||||
@@ -49,19 +100,19 @@ to expose your developer assistant tools to a Looker instance:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -290,7 +341,7 @@ assistant to list models, explores, dimensions, and measures. Run a
|
||||
query, retrieve the SQL for a query, and run a saved Look.
|
||||
|
||||
The full tool list is available in the [Prebuilt Tools
|
||||
Reference](../../reference/prebuilt-tools/#looker).
|
||||
Reference](../../reference/prebuilt-tools.md#looker).
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
|
||||
@@ -45,19 +45,19 @@ instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -68,7 +68,12 @@ networks:
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
|
||||
list of hosts for validation. E.g. `command: [ "toolbox",
|
||||
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
|
||||
"--allowed-hosts", "localhost:5000"]`
|
||||
|
||||
To implement CORS, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `command: [ "toolbox",
|
||||
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
|
||||
"--allowed-origins", "https://foo.bar"]`
|
||||
|
||||
@@ -188,9 +188,13 @@ description: >
|
||||
path: tools.yaml
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
{{< notice tip >}}
|
||||
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
|
||||
list of hosts for validation. E.g. `args: ["--address",
|
||||
"0.0.0.0", "--allowed-hosts", "foo.bar:5000"]`
|
||||
|
||||
To implement CORS, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. E.g. `args: ["--address",
|
||||
"0.0.0.0", "--allowed-origins", "https://foo.bar"]`
|
||||
{{< /notice >}}
|
||||
|
||||
|
||||
@@ -142,14 +142,18 @@ deployment will time out.
|
||||
|
||||
### Update deployed server to be secure
|
||||
|
||||
To prevent DNS rebinding attack, use the `--allowed-origins` flag to specify a
|
||||
list of origins permitted to access the server. In order to do that, you will
|
||||
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
|
||||
list of allowed hosts. To do this, you will
|
||||
have to redeploy the Cloud Run service with the new flags.
|
||||
|
||||
To implement CORS checks, use the `--allowed-origins` flag to specify a list of
|
||||
origins permitted to access the server.
|
||||
|
||||
1. Set an environment variable to the cloud run url:
|
||||
|
||||
```bash
|
||||
export URL=<cloud run url>
|
||||
export HOST=<cloud run host>
|
||||
```
|
||||
|
||||
2. Redeploy Toolbox:
|
||||
@@ -160,7 +164,7 @@ have to re-deploy the cloud run service with the new flag.
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL"
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST"
|
||||
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
|
||||
```
|
||||
|
||||
@@ -172,7 +176,7 @@ have to re-deploy the cloud run service with the new flag.
|
||||
--service-account toolbox-identity \
|
||||
--region us-central1 \
|
||||
--set-secrets "/app/tools.yaml=tools:latest" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL" \
|
||||
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST" \
|
||||
# TODO(dev): update the following to match your VPC if necessary
|
||||
--network default \
|
||||
--subnet default
|
||||
|
||||
@@ -8,25 +8,26 @@ description: >
|
||||
|
||||
## Reference
|
||||
|
||||
| Flag (Short) | Flag (Long) | Description | Default |
|
||||
|--------------|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
|
||||
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
|
||||
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
|
||||
| `-h` | `--help` | help for toolbox | |
|
||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
|
||||
| Flag (Short) | Flag (Long) | Description | Default |
|
||||
|--------------|----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
|
||||
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
|
||||
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
|
||||
| `-h` | `--help` | help for toolbox | |
|
||||
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
|
||||
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
|
||||
| `-p` | `--port` | Port the server will listen on. | `5000` |
|
||||
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
|
||||
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
|
||||
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
|
||||
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
|
||||
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
|
||||
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
|
||||
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
|
||||
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
|
||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
|
||||
| `-v` | `--version` | version for toolbox | |
|
||||
| | `--ui` | Launches the Toolbox UI web server. | |
|
||||
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
|
||||
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
|
||||
| `-v` | `--version` | version for toolbox | |
|
||||
|
||||
## Examples
|
||||
|
||||
|
||||
@@ -105,6 +105,8 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
|
||||
* `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
|
||||
OAuth access token for authentication. Defaults to `false`.
|
||||
* `BIGQUERY_SCOPES`: (Optional) A comma-separated list of OAuth scopes to
|
||||
use for authentication.
|
||||
* **Permissions:**
|
||||
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view
|
||||
metadata.
|
||||
@@ -185,6 +187,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -201,6 +204,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Cloud SQL for PostgreSQL
|
||||
|
||||
@@ -273,6 +277,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -288,6 +293,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Cloud SQL for SQL Server
|
||||
|
||||
@@ -334,6 +340,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
manage existing resources.
|
||||
* All `viewer` tools
|
||||
* `create_database`
|
||||
* `create_backup`
|
||||
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
|
||||
all resources.
|
||||
* All `editor` and `viewer` tools
|
||||
@@ -349,6 +356,7 @@ See [Usage Examples](../reference/cli.md#examples).
|
||||
* `create_user`: Creates a new user in a Cloud SQL instance.
|
||||
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
|
||||
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
|
||||
* `create_backup`: Creates a backup on a Cloud SQL instance.
|
||||
|
||||
## Dataplex
|
||||
|
||||
|
||||
@@ -28,17 +28,19 @@ The following configurations are placed at the top level of a `tools.yaml` file.
|
||||
{{< notice tip >}}
|
||||
If you are accessing Toolbox with multiple applications, each
|
||||
application should register their own Client ID even if they use the same
|
||||
"kind" of auth provider.
|
||||
"type" of auth provider.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
authServices:
|
||||
my_auth_app_1:
|
||||
kind: google
|
||||
clientId: ${YOUR_CLIENT_ID_1}
|
||||
my_auth_app_2:
|
||||
kind: google
|
||||
clientId: ${YOUR_CLIENT_ID_2}
|
||||
kind: authServices
|
||||
name: my_auth_app_1
|
||||
type: google
|
||||
clientId: ${YOUR_CLIENT_ID_1}
|
||||
---
|
||||
kind: authServices
|
||||
name: my_auth_app_2
|
||||
type: google
|
||||
clientId: ${YOUR_CLIENT_ID_2}
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
|
||||
@@ -40,10 +40,10 @@ id-token][provided-claims] can be used for the parameter.
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
authServices:
|
||||
my-google-auth:
|
||||
kind: google
|
||||
clientId: ${YOUR_GOOGLE_CLIENT_ID}
|
||||
kind: authServices
|
||||
name: my-google-auth
|
||||
type: google
|
||||
clientId: ${YOUR_GOOGLE_CLIENT_ID}
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
@@ -55,5 +55,5 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "google". |
|
||||
| type | string | true | Must be "google". |
|
||||
| clientId | string | true | Client ID of your application from registering your application. |
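As a usage sketch, a tool can then require a verified token from this service before it runs. The tool below is illustrative, and the tool-level `authRequired` field is assumed from the tools documentation rather than shown here:

```yaml
kind: tools
name: search-my-hotels
type: postgres-sql
source: my-pg-source
description: Search hotels, restricted to signed-in users.
authRequired:              # assumed field: list of auth services whose token must be presented
  - my-google-auth
parameters:
  - name: name
    type: string
    description: The name of the hotel.
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
```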
|
||||
|
||||
84
docs/en/resources/embeddingModels/_index.md
Normal file
@@ -0,0 +1,84 @@
|
||||
---
|
||||
title: "EmbeddingModels"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
|
||||
---
|
||||
|
||||
EmbeddingModels represent services that generate vector representations of text
|
||||
data. In the MCP Toolbox, these models enable **Semantic Queries**,
|
||||
allowing [Tools](../tools/) to automatically convert human-readable text into
|
||||
numerical vectors before using them in a query.
|
||||
|
||||
This is primarily used in two scenarios:
|
||||
|
||||
- **Vector Ingestion**: Converting a text parameter into a vector string during
|
||||
an `INSERT` operation.
|
||||
|
||||
- **Semantic Search**: Converting a natural language query into a vector to
|
||||
perform similarity searches.
|
||||
|
||||
## Example
|
||||
|
||||
The following configuration defines an embedding model and applies it to
|
||||
specific tool parameters.
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your API keys into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
### Step 1 - Define an Embedding Model
|
||||
|
||||
Define an embedding model with `kind: embeddingModels`:
|
||||
|
||||
```yaml
|
||||
kind: embeddingModels
|
||||
name: gemini-model # Name of the embedding model
|
||||
type: gemini
|
||||
model: gemini-embedding-001
|
||||
apiKey: ${GOOGLE_API_KEY}
|
||||
dimension: 768
|
||||
```
|
||||
|
||||
### Step 2 - Embed Tool Parameters
|
||||
|
||||
To use the defined embedding model, embed your query parameters using the
|
||||
`embeddedBy` field. Only string-typed
|
||||
parameters can be embedded:
|
||||
|
||||
```yaml
|
||||
# Vector ingestion tool
|
||||
kind: tools
|
||||
name: insert_embedding
|
||||
type: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
INSERT INTO documents (content, embedding)
|
||||
VALUES ($1, $2);
|
||||
parameters:
|
||||
- name: content
|
||||
type: string
|
||||
- name: vector_string
|
||||
type: string
|
||||
description: The text to be vectorized and stored.
|
||||
embeddedBy: gemini-model # refers to the name of a defined embedding model
|
||||
---
|
||||
# Semantic search tool
|
||||
kind: tools
|
||||
name: search_embedding
|
||||
type: postgres-sql
|
||||
source: my-pg-instance
|
||||
statement: |
|
||||
SELECT id, content, embedding <-> $1 AS distance
|
||||
FROM documents
|
||||
ORDER BY distance LIMIT 1
|
||||
parameters:
|
||||
- name: semantic_search_string
|
||||
type: string
|
||||
description: The search query that will be converted to a vector.
|
||||
embeddedBy: gemini-model # refers to the name of a defined embedding model
|
||||
```
|
||||
|
||||
## Kinds of Embedding Models
|
||||
73
docs/en/resources/embeddingModels/gemini.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
title: "Gemini Embedding"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Use Google's Gemini models to generate high-performance text embeddings for vector databases.
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
Google Gemini provides state-of-the-art embedding models that convert text into
|
||||
high-dimensional vectors.
|
||||
|
||||
### Authentication
|
||||
|
||||
Toolbox uses your [Application Default Credentials
|
||||
(ADC)][adc] to authorize with the
|
||||
Gemini API client.
|
||||
|
||||
Optionally, you can use an [API key][api-key]. You can obtain an API
|
||||
key from [Google AI Studio][ai-studio].
|
||||
|
||||
We recommend using an API key for testing and using application default
|
||||
credentials for production.
|
||||
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[api-key]: https://ai.google.dev/gemini-api/docs/api-key#api-keys
|
||||
[ai-studio]: https://aistudio.google.com/app/apikey
|
||||
|
||||
## Behavior
|
||||
|
||||
### Automatic Vectorization
|
||||
|
||||
When a tool parameter is configured with `embeddedBy: <your-gemini-model-name>`,
|
||||
the Toolbox intercepts the raw text input from the client and sends it to the
|
||||
Gemini API. The resulting numerical array is then formatted before being passed
|
||||
to your database source.
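For instance, a minimal sketch of a search tool whose parameter is vectorized automatically (the source name, table, and column are illustrative, and `gemini-model` refers to the model defined in the example below):

```yaml
kind: tools
name: find-similar-docs
type: postgres-sql
source: my-pg-instance          # illustrative source name
description: Finds the documents closest to the query text.
parameters:
  - name: query_text
    type: string
    description: Natural-language search text, converted to a vector before the query runs.
    embeddedBy: gemini-model    # raw text is sent to the Gemini API and replaced by its embedding
statement: |
  SELECT id, content, embedding <-> $1 AS distance
  FROM documents
  ORDER BY distance LIMIT 5;
```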
|
||||
|
||||
### Dimension Matching
|
||||
|
||||
The `dimension` field must match the expected size of your database column
|
||||
(e.g., a `vector(768)` column in PostgreSQL). This setting is supported by newer
|
||||
models released since 2024. You cannot set this value when using the earlier model
|
||||
(`models/embedding-001`). Check out [available Gemini models][modellist] for more
|
||||
information.
|
||||
|
||||
[modellist]:
|
||||
https://docs.cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#supported-models
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
kind: embeddingModels
|
||||
name: gemini-model
|
||||
type: gemini
|
||||
model: gemini-embedding-001
|
||||
apiKey: ${GOOGLE_API_KEY}
|
||||
dimension: 768
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
Use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|--------------------------------------------------------------|
|
||||
| type | string | true | Must be `gemini`. |
|
||||
| model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). |
|
||||
| apiKey | string | false | Your API Key from Google AI Studio. |
|
||||
| dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). |
|
||||
@@ -16,14 +16,14 @@ can be sent to a Large Language Model (LLM). The Toolbox server implements the
|
||||
specification, allowing clients to discover and retrieve these prompts.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
code_review:
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
kind: prompts
|
||||
name: code_review
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
```
|
||||
|
||||
## Prompt Schema
|
||||
@@ -31,7 +31,7 @@ prompts:
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||
| description | string | No | A brief explanation of what the prompt does. |
|
||||
| kind | string | No | The kind of prompt. Defaults to `"custom"`. |
|
||||
| type | string | No | The type of prompt. Defaults to `"custom"`. |
|
||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||
|
||||
|
||||
@@ -17,14 +17,14 @@ Here is an example of a simple prompt that takes a single argument, code, and
|
||||
asks an LLM to review it.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
code_review:
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
kind: prompts
|
||||
name: code_review
|
||||
description: "Asks the LLM to analyze code quality and suggest improvements."
|
||||
messages:
|
||||
- content: "Please review the following code for quality, correctness, and potential improvements: \n\n{{.code}}"
|
||||
arguments:
|
||||
- name: "code"
|
||||
description: "The code to review"
|
||||
```
|
||||
|
||||
### Multi-message prompt
|
||||
@@ -33,19 +33,19 @@ You can define prompts with multiple messages to set up more complex
|
||||
conversational contexts, like a role-playing scenario.
|
||||
|
||||
```yaml
|
||||
prompts:
|
||||
roleplay_scenario:
|
||||
description: "Sets up a roleplaying scenario with initial messages."
|
||||
arguments:
|
||||
- name: "character"
|
||||
description: "The character the AI should embody."
|
||||
- name: "situation"
|
||||
description: "The initial situation for the roleplay."
|
||||
messages:
|
||||
- role: "user"
|
||||
content: "Let's roleplay. You are {{.character}}. The situation is: {{.situation}}"
|
||||
- role: "assistant"
|
||||
content: "Okay, I understand. I am ready. What happens next?"
|
||||
kind: prompts
|
||||
name: roleplay_scenario
|
||||
description: "Sets up a roleplaying scenario with initial messages."
|
||||
arguments:
|
||||
- name: "character"
|
||||
description: "The character the AI should embody."
|
||||
- name: "situation"
|
||||
description: "The initial situation for the roleplay."
|
||||
messages:
|
||||
- role: "user"
|
||||
content: "Let's roleplay. You are {{.character}}. The situation is: {{.situation}}"
|
||||
- role: "assistant"
|
||||
content: "Okay, I understand. I am ready. What happens next?"
|
||||
```
|
||||
|
||||
## Reference
|
||||
@@ -54,7 +54,7 @@ prompts:
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-------------|--------------------------------|--------------|--------------------------------------------------------------------------|
|
||||
| kind | string | No | The kind of prompt. Must be `"custom"`. |
|
||||
| type | string | No | The type of prompt. Must be `"custom"`. |
|
||||
| description | string | No | A brief explanation of what the prompt does. |
|
||||
| messages | [][Message](#message-schema) | Yes | A list of one or more message objects that make up the prompt's content. |
|
||||
| arguments | [][Argument](#argument-schema) | No | A list of arguments that can be interpolated into the prompt's content. |
|
||||
|
||||
@@ -17,15 +17,15 @@ instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-cloud-sql-source:
|
||||
kind: cloud-sql-postgres
|
||||
project: my-project-id
|
||||
region: us-central1
|
||||
instance: my-instance-name
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
kind: sources
|
||||
name: my-cloud-sql-source
|
||||
type: cloud-sql-postgres
|
||||
project: my-project-id
|
||||
region: us-central1
|
||||
instance: my-instance-name
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
```
|
||||
|
||||
In implementation, each source is a different connection pool or client that used
|
||||
|
||||
@@ -25,19 +25,20 @@ Authentication can be handled in two ways:
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-alloydb-admin:
|
||||
kind: alloy-admin
|
||||
|
||||
my-oauth-alloydb-admin:
|
||||
kind: alloydb-admin
|
||||
useClientOAuth: true
|
||||
kind: sources
|
||||
name: my-alloydb-admin
|
||||
type: alloydb-admin
|
||||
---
|
||||
kind: sources
|
||||
name: my-oauth-alloydb-admin
|
||||
type: alloydb-admin
|
||||
useClientOAuth: true
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "alloydb-admin". |
|
||||
| type | string | true | Must be "alloydb-admin". |
|
||||
| defaultProject | string | false | The Google Cloud project ID to use for AlloyDB infrastructure tools. |
|
||||
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
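A short sketch combining the optional fields from the table above (the project ID is a placeholder):

```yaml
kind: sources
name: my-alloydb-admin
type: alloydb-admin
defaultProject: my-project-id   # optional: default project for AlloyDB infrastructure tools
useClientOAuth: false           # optional: defaults to false (Application Default Credentials)
```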
|
||||
|
||||
@@ -94,7 +94,10 @@ cluster][alloydb-free-trial].
|
||||
instance.
|
||||
|
||||
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
|
||||
Lists all the user-created roles in PostgreSQL database..
|
||||
Lists all the user-created roles in PostgreSQL database.
|
||||
|
||||
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
|
||||
Lists all the stored procedure in PostgreSQL database.
|
||||
|
||||
### Pre-built Configurations
|
||||
|
||||
@@ -173,17 +176,17 @@ To connect using IAM authentication:
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-alloydb-pg-source:
|
||||
kind: alloydb-postgres
|
||||
project: my-project-id
|
||||
region: us-central1
|
||||
cluster: my-cluster
|
||||
instance: my-instance
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
# ipType: "public"
|
||||
kind: sources
|
||||
name: my-alloydb-pg-source
|
||||
type: alloydb-postgres
|
||||
project: my-project-id
|
||||
region: us-central1
|
||||
cluster: my-cluster
|
||||
instance: my-instance
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
# ipType: "public"
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
@@ -195,7 +198,7 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "alloydb-postgres". |
|
||||
| type | string | true | Must be "alloydb-postgres". |
|
||||
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
|
||||
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
|
||||
| cluster | string | true | Name of the AlloyDB cluster (e.g. "my-cluster"). |
|
||||
|
||||
@@ -94,6 +94,13 @@ intend to run. Common roles include `roles/bigquery.user` (which includes
|
||||
permissions to run jobs and read data) or `roles/bigquery.dataViewer`.
|
||||
Follow this [guide][set-adc] to set up your ADC.
|
||||
|
||||
If you are running on Google Compute Engine (GCE) or Google Kubernetes Engine
|
||||
(GKE), you might need to explicitly set the access scopes for the service
|
||||
account. While you can configure scopes when creating the VM or node pool, you
|
||||
can also specify them in the source configuration using the `scopes` field.
|
||||
Common scopes include `https://www.googleapis.com/auth/bigquery` or
|
||||
`https://www.googleapis.com/auth/cloud-platform`.
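For example, a minimal sketch of a source that sets these scopes explicitly (the project ID is a placeholder):

```yaml
kind: sources
name: my-bigquery-source
type: bigquery
project: my-project-id
scopes:
  - https://www.googleapis.com/auth/bigquery
  - https://www.googleapis.com/auth/cloud-platform
```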
|
||||
|
||||
### Authentication via User's OAuth Access Token
|
||||
|
||||
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
|
||||
@@ -114,42 +121,52 @@ identity used has been granted the correct IAM permissions.
|
||||
Initialize a BigQuery source that uses ADC:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-bigquery-source:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
|
||||
kind: sources
|
||||
name: my-bigquery-source
|
||||
type: "bigquery"
|
||||
project: "my-project-id"
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
|
||||
# scopes: # Optional: List of OAuth scopes to request.
|
||||
# - "https://www.googleapis.com/auth/bigquery"
|
||||
# - "https://www.googleapis.com/auth/drive.readonly"
|
||||
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
|
||||
```
|
||||
|
||||
Initialize a BigQuery source that uses the client's access token:
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-bigquery-client-auth-source:
|
||||
kind: "bigquery"
|
||||
project: "my-project-id"
|
||||
useClientOAuth: true
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
|
||||
kind: sources
|
||||
name: my-bigquery-client-auth-source
|
||||
type: "bigquery"
|
||||
project: "my-project-id"
|
||||
useClientOAuth: true
|
||||
# location: "US" # Optional: Specifies the location for query jobs.
|
||||
# writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
|
||||
# allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
|
||||
# - "my_dataset_1"
|
||||
# - "other_project.my_dataset_2"
|
||||
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
|
||||
# scopes: # Optional: List of OAuth scopes to request.
|
||||
# - "https://www.googleapis.com/auth/bigquery"
|
||||
# - "https://www.googleapis.com/auth/drive.readonly"
|
||||
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|---------------------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "bigquery". |
|
||||
| type | string | true | Must be "bigquery". |
|
||||
| project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
|
||||
| location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
|
||||
| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
|
||||
| allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
|
||||
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
|
||||
| scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
|
||||
| impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |
|
||||
| maxQueryResultRows | int | false | The maximum number of rows to return from a query. Defaults to 50. |
|
||||
|
||||
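To make the `protected` write mode concrete, here is a minimal hedged sketch in the new `kind: sources` style; the source name and project are placeholders, and the fields simply restate the reference table above.

```yaml
# Minimal sketch of a session-based BigQuery source (hypothetical names).
kind: sources
name: my-bigquery-protected-source   # placeholder name
type: "bigquery"
project: "my-project-id"             # placeholder project
writeMode: "protected"               # all tools on this source share one BigQuery session
# allowedDatasets:                   # optional: restrict tools to specific datasets
#   - "my_dataset_1"
# Note: useClientOAuth must remain false (the default) when writeMode is "protected".
```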
@@ -59,17 +59,17 @@ applying IAM permissions and roles to an identity.

## Example

```yaml
sources:
my-bigtable-source:
kind: "bigtable"
project: "my-project-id"
instance: "test-instance"
kind: sources
name: my-bigtable-source
type: "bigtable"
project: "my-project-id"
instance: "test-instance"
```

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
| kind | string | true | Must be "bigtable". |
| type | string | true | Must be "bigtable". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| instance | string | true | Name of the Bigtable instance. |
@@ -23,19 +23,19 @@ distributed architectures, and a flexible approach to schema definition.

## Example

```yaml
sources:
my-cassandra-source:
kind: cassandra
hosts:
- 127.0.0.1
keyspace: my_keyspace
protoVersion: 4
username: ${USER_NAME}
password: ${PASSWORD}
caPath: /path/to/ca.crt # Optional: path to CA certificate
certPath: /path/to/client.crt # Optional: path to client certificate
keyPath: /path/to/client.key # Optional: path to client key
enableHostVerification: true # Optional: enable host verification
kind: sources
name: my-cassandra-source
type: cassandra
hosts:
- 127.0.0.1
keyspace: my_keyspace
protoVersion: 4
username: ${USER_NAME}
password: ${PASSWORD}
caPath: /path/to/ca.crt # Optional: path to CA certificate
certPath: /path/to/client.crt # Optional: path to client certificate
keyPath: /path/to/client.key # Optional: path to client key
enableHostVerification: true # Optional: enable host verification
```

{{< notice tip >}}
@@ -47,7 +47,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|------------------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cassandra". |
| type | string | true | Must be "cassandra". |
| hosts | string[] | true | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified. |
| keyspace | string | true | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace"). |
| protoVersion | integer | false | Protocol version for the Cassandra connection (e.g., 4). |
@@ -46,31 +46,31 @@ ClickHouse supports multiple protocols:

### Secure Connection Example

```yaml
sources:
secure-clickhouse-source:
kind: clickhouse
host: clickhouse.example.com
port: "8443"
database: analytics
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: https
secure: true
kind: sources
name: secure-clickhouse-source
type: clickhouse
host: clickhouse.example.com
port: "8443"
database: analytics
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: https
secure: true
```

### HTTP Protocol Example

```yaml
sources:
http-clickhouse-source:
kind: clickhouse
host: localhost
port: "8123"
database: logs
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: http
secure: false
kind: sources
name: http-clickhouse-source
type: clickhouse
host: localhost
port: "8123"
database: logs
user: ${CLICKHOUSE_USER}
password: ${CLICKHOUSE_PASSWORD}
protocol: http
secure: false
```

{{< notice tip >}}
@@ -82,7 +82,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------------|
| kind | string | true | Must be "clickhouse". |
| type | string | true | Must be "clickhouse". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). |
@@ -20,21 +20,22 @@ Authentication can be handled in two ways:

## Example

```yaml
sources:
my-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id

my-oauth-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id
useClientOAuth: true
kind: sources
name: my-gda-source
type: cloud-gemini-data-analytics
projectId: my-project-id
---
kind: sources
name: my-oauth-gda-source
type: cloud-gemini-data-analytics
projectId: my-project-id
useClientOAuth: true
```

## Reference

| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "cloud-gemini-data-analytics". |
| type | string | true | Must be "cloud-gemini-data-analytics". |
| projectId | string | true | The Google Cloud Project ID where the API is enabled. |
| useClientOAuth | boolean | false | If true, the source uses the token provided by the caller (forwarded to the API). Otherwise, it uses server-side Application Default Credentials (ADC). Defaults to `false`. |
@@ -123,41 +123,41 @@ identity used has been granted the correct IAM permissions.

Initialize a Cloud Healthcare API source that uses ADC:

```yaml
sources:
my-healthcare-source:
kind: "cloud-healthcare"
project: "my-project-id"
region: "us-central1"
dataset: "my-healthcare-dataset-id"
# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs.
# - "my_fhir_store_1"
# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs.
# - "my_dicom_store_1"
# - "my_dicom_store_2"
kind: sources
name: my-healthcare-source
type: "cloud-healthcare"
project: "my-project-id"
region: "us-central1"
dataset: "my-healthcare-dataset-id"
# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs.
# - "my_fhir_store_1"
# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs.
# - "my_dicom_store_1"
# - "my_dicom_store_2"
```

Initialize a Cloud Healthcare API source that uses the client's access token:

```yaml
sources:
my-healthcare-client-auth-source:
kind: "cloud-healthcare"
project: "my-project-id"
region: "us-central1"
dataset: "my-healthcare-dataset-id"
useClientOAuth: true
# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs.
# - "my_fhir_store_1"
# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs.
# - "my_dicom_store_1"
# - "my_dicom_store_2"
kind: sources
name: my-healthcare-client-auth-source
type: "cloud-healthcare"
project: "my-project-id"
region: "us-central1"
dataset: "my-healthcare-dataset-id"
useClientOAuth: true
# allowedFhirStores: # Optional: Restricts tool access to a specific list of FHIR store IDs.
# - "my_fhir_store_1"
# allowedDicomStores: # Optional: Restricts tool access to a specific list of DICOM store IDs.
# - "my_dicom_store_1"
# - "my_dicom_store_2"
```

## Reference

| **field** | **type** | **required** | **description** |
|--------------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-healthcare". |
| type | string | true | Must be "cloud-healthcare". |
| project | string | true | ID of the GCP project that the dataset lives in. |
| region | string | true | Specifies the region (e.g., 'us', 'asia-northeast1') of the healthcare dataset. [Learn More](https://cloud.google.com/healthcare-api/docs/regions) |
| dataset | string | true | ID of the healthcare dataset. |
@@ -25,18 +25,19 @@ Authentication can be handled in two ways:

## Example

```yaml
sources:
my-cloud-monitoring:
kind: cloud-monitoring

my-oauth-cloud-monitoring:
kind: cloud-monitoring
useClientOAuth: true
kind: sources
name: my-cloud-monitoring
type: cloud-monitoring
---
kind: sources
name: my-oauth-cloud-monitoring
type: cloud-monitoring
useClientOAuth: true
```

## Reference

| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-monitoring". |
| type | string | true | Must be "cloud-monitoring". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
@@ -24,19 +24,20 @@ Authentication can be handled in two ways:

## Example

```yaml
sources:
my-cloud-sql-admin:
kind: cloud-sql-admin

my-oauth-cloud-sql-admin:
kind: cloud-sql-admin
useClientOAuth: true
kind: sources
name: my-cloud-sql-admin
type: cloud-sql-admin
---
kind: sources
name: my-oauth-cloud-sql-admin
type: cloud-sql-admin
useClientOAuth: true
```

## Reference

| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------------------------------------------------------------------------------------------------------ |
| kind | string | true | Must be "cloud-sql-admin". |
| type | string | true | Must be "cloud-sql-admin". |
| defaultProject | string | false | The Google Cloud project ID to use for Cloud SQL infrastructure tools. |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
@@ -87,16 +87,16 @@ Currently, this source only uses standard authentication. You will need to

## Example

```yaml
sources:
my-cloud-sql-mssql-instance:
kind: cloud-sql-mssql
project: my-project
region: my-region
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: private
kind: sources
name: my-cloud-sql-mssql-instance
type: cloud-sql-mssql
project: my-project
region: my-region
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: private
```

{{< notice tip >}}
@@ -108,7 +108,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-mssql". |
| type | string | true | Must be "cloud-sql-mssql". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
@@ -128,16 +128,16 @@ To connect using IAM authentication:

## Example

```yaml
sources:
my-cloud-sql-mysql-source:
kind: cloud-sql-mysql
project: my-project-id
region: us-central1
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: "private"
kind: sources
name: my-cloud-sql-mysql-source
type: cloud-sql-mysql
project: my-project-id
region: us-central1
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: "private"
```

{{< notice tip >}}
@@ -149,7 +149,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-mysql". |
| type | string | true | Must be "cloud-sql-mysql". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
@@ -91,7 +91,10 @@ to a database by following these instructions][csql-pg-quickstart].
instance.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in PostgreSQL database..
Lists all the user-created roles in PostgreSQL database.

- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedure in PostgreSQL database.

### Pre-built Configurations

@@ -175,16 +178,16 @@ To connect using IAM authentication:

## Example

```yaml
sources:
my-cloud-sql-pg-source:
kind: cloud-sql-postgres
project: my-project-id
region: us-central1
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: "private"
kind: sources
name: my-cloud-sql-pg-source
type: cloud-sql-postgres
project: my-project-id
region: us-central1
instance: my-instance
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# ipType: "private"
```

{{< notice tip >}}
@@ -196,7 +199,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-postgres". |
| type | string | true | Must be "cloud-sql-postgres". |
| project | string | true | Id of the GCP project that the cluster was created in (e.g. "my-project-id"). |
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
@@ -19,14 +19,14 @@ allowing tools to execute SQL queries against it.

## Example

```yaml
sources:
my-couchbase-instance:
kind: couchbase
connectionString: couchbase://localhost
bucket: travel-sample
scope: inventory
username: Administrator
password: password
kind: sources
name: my-couchbase-instance
type: couchbase
connectionString: couchbase://localhost
bucket: travel-sample
scope: inventory
username: Administrator
password: password
```

{{< notice note >}}
@@ -38,7 +38,7 @@ Connections](https://docs.couchbase.com/java-sdk/current/howtos/managing-connect

| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "couchbase". |
| type | string | true | Must be "couchbase". |
| connectionString | string | true | Connection string for the Couchbase cluster. |
| bucket | string | true | Name of the bucket to connect to. |
| scope | string | true | Name of the scope within the bucket. |
@@ -23,10 +23,10 @@ applying artificial intelligence and machine learning.

## Example

```yaml
sources:
my-dataplex-source:
kind: "dataplex"
project: "my-project-id"
kind: sources
name: my-dataplex-source
type: "dataplex"
project: "my-project-id"
```

## Sample System Prompt
@@ -229,22 +229,38 @@ Finds resources that were created within, before, or after a given date or time.
### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.

aspect:x Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect=x Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect:xOPERATORvalue
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME
`has:x`
Matches `x` as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`

The list of supported {OPERATOR}s depends on the type of field in the aspect, as follows:
- String: = (exact match) and : (substring)
- All number types: =, :, <, >, <=, >=, =>, =<
- Enum: =
- Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- Boolean: =
`has=x`
Matches `x` as the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`

Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
- aspect:example-project.us-central1.employee-info.is-enrolled=true
- aspect:example-project.us-central1.employee=true
- aspect:employee=true
`xOPERATORvalue`
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID.FIELD_NAME`

The list of supported operators depends on the type of field in the aspect, as follows:
* **String**: `=` (exact match)
* **All number types**: `=`, `:`, `<`, `>`, `<=`, `>=`, `=>`, `=<`
* **Enum**: `=` (exact match only)
* **Datetime**: same as for numbers, but the values to compare are treated as datetimes instead of numbers
* **Boolean**: `=`

Only top-level fields of the aspect are searchable.

* Syntax for system aspect types:
* `ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.LOCATION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `type` field in the `bigquery-dataset` aspect is `default`:
* `bigquery-dataset.type=default`
* `dataplex-types.bigquery-dataset.type=default`
* `dataplex-types.global.bigquery-dataset.type=default`
* Syntax for custom aspect types:
* If the aspect is created in the global region: `PROJECT_ID.ASPECT_TYPE_ID.FIELD_NAME`
* If the aspect is created in a specific region: `PROJECT_ID.REGION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `is-enrolled` field in the `employee-info` aspect is `true`.
* `example-project.us-central1.employee-info.is-enrolled=true`
* `example-project.employee-info.is-enrolled=true`
Example:
You can use the following filters
@@ -258,6 +274,25 @@ Logical AND and logical OR are supported. For example, foo OR bar.
You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.

### Abbreviated syntax

An abbreviated search syntax is also available, using `|` (vertical bar) for `OR` operators and `,` (comma) for `AND` operators.

For example, to search for entries inside one of many projects using the `OR` operator, you can use the following abbreviated syntax:

`projectid:(id1|id2|id3|id4)`

The same search without using abbreviated syntax looks like the following:

`projectid:id1 OR projectid:id2 OR projectid:id3 OR projectid:id4`

To search for entries with matching column names, use the following:

* **AND**: `column:(name1,name2,name3)`
* **OR**: `column:(name1|name2|name3)`

This abbreviated syntax works for the qualified predicates except for `label` in keyword search.

### Request
1. Always try to rewrite the prompt using search syntax.

@@ -320,5 +355,5 @@ Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or`

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex". |
| type | string | true | Must be "dataplex". |
| project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
@@ -40,14 +40,14 @@ and user credentials for that namespace.

## Example

```yaml
sources:
my-dgraph-source:
kind: dgraph
dgraphUrl: https://xxxx.cloud.dgraph.io
user: ${USER_NAME}
password: ${PASSWORD}
apiKey: ${API_KEY}
namespace : 0
kind: sources
name: my-dgraph-source
type: dgraph
dgraphUrl: https://xxxx.cloud.dgraph.io
user: ${USER_NAME}
password: ${PASSWORD}
apiKey: ${API_KEY}
namespace : 0
```

{{< notice tip >}}
@@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file.

| **Field** | **Type** | **Required** | **Description** |
|-------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dgraph". |
| type | string | true | Must be "dgraph". |
| dgraphUrl | string | true | Connection URI (e.g. "<https://xxx.cloud.dgraph.io>", "<https://localhost:8080>"). |
| user | string | false | Name of the Dgraph user to connect as (e.g., "groot"). |
| password | string | false | Password of the Dgraph user (e.g., "password"). |
@@ -59,18 +59,18 @@ applying permissions to an API key.

## Example

```yaml
sources:
my-elasticsearch-source:
kind: "elasticsearch"
addresses:
- "http://localhost:9200"
apikey: "my-api-key"
kind: sources
name: my-elasticsearch-source
type: "elasticsearch"
addresses:
- "http://localhost:9200"
apikey: "my-api-key"
```

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------|
| kind | string | true | Must be "elasticsearch". |
| type | string | true | Must be "elasticsearch". |
| addresses | []string | true | List of Elasticsearch hosts to connect to. |
| apikey | string | true | The API key to use for authentication. |
@@ -36,14 +36,14 @@ user][fb-users] to login to the database with.

## Example

```yaml
sources:
my_firebird_db:
kind: firebird
host: "localhost"
port: 3050
database: "/path/to/your/database.fdb"
user: ${FIREBIRD_USER}
password: ${FIREBIRD_PASS}
kind: sources
name: my_firebird_db
type: firebird
host: "localhost"
port: 3050
database: "/path/to/your/database.fdb"
user: ${FIREBIRD_USER}
password: ${FIREBIRD_PASS}
```

{{< notice tip >}}
@@ -55,7 +55,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------------|
| kind | string | true | Must be "firebird". |
| type | string | true | Must be "firebird". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "3050") |
| database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |
@@ -61,17 +61,17 @@ database named `(default)` will be used.

## Example

```yaml
sources:
my-firestore-source:
kind: "firestore"
project: "my-project-id"
# database: "my-database" # Optional, defaults to "(default)"
kind: sources
name: my-firestore-source
type: "firestore"
project: "my-project-id"
# database: "my-database" # Optional, defaults to "(default)"
```

## Reference

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore". |
| type | string | true | Must be "firestore". |
| project | string | true | Id of the GCP project that contains the Firestore database (e.g. "my-project-id"). |
| database | string | false | Name of the Firestore database to connect to. Defaults to "(default)" if not specified. |
@@ -21,18 +21,18 @@ and other HTTP-accessible resources.

## Example

```yaml
sources:
my-http-source:
kind: http
baseUrl: https://api.example.com/data
timeout: 10s # default to 30s
headers:
Authorization: Bearer ${API_KEY}
Content-Type: application/json
queryParams:
param1: value1
param2: value2
# disableSslVerification: false
kind: sources
name: my-http-source
type: http
baseUrl: https://api.example.com/data
timeout: 10s # default to 30s
headers:
Authorization: Bearer ${API_KEY}
Content-Type: application/json
queryParams:
param1: value1
param2: value2
# disableSslVerification: false
```

{{< notice tip >}}
@@ -44,7 +44,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|------------------------|:-----------------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "http". |
| type | string | true | Must be "http". |
| baseUrl | string | true | The base URL for the HTTP requests (e.g., `https://api.example.com`). |
| timeout | string | false | The timeout for HTTP requests (e.g., "5s", "1m", refer to [ParseDuration][parse-duration-doc] for more examples). Defaults to 30s. |
| headers | map[string]string | false | Default headers to include in the HTTP requests. |
@@ -56,16 +56,16 @@ To initialize the application default credential run `gcloud auth login

## Example

```yaml
sources:
my-looker-source:
kind: looker
base_url: http://looker.example.com
client_id: ${LOOKER_CLIENT_ID}
client_secret: ${LOOKER_CLIENT_SECRET}
project: ${LOOKER_PROJECT}
location: ${LOOKER_LOCATION}
verify_ssl: true
timeout: 600s
kind: sources
name: my-looker-source
type: looker
base_url: http://looker.example.com
client_id: ${LOOKER_CLIENT_ID}
client_secret: ${LOOKER_CLIENT_SECRET}
project: ${LOOKER_PROJECT}
location: ${LOOKER_LOCATION}
verify_ssl: true
timeout: 600s
```

The Looker base url will look like "https://looker.example.com", don't include
@@ -93,7 +93,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------------------------------------------|
| kind | string | true | Must be "looker". |
| type | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
@@ -45,18 +45,18 @@ MariaDB user][mariadb-users] to log in to the database.

## Example

```yaml
sources:
my_mariadb_db:
kind: mysql
host: 127.0.0.1
port: 3306
database: my_db
user: ${MARIADB_USER}
password: ${MARIADB_PASS}
# Optional TLS and other driver parameters. For example, enable preferred TLS:
# queryParams:
# tls: preferred
queryTimeout: 30s # Optional: query timeout duration
kind: sources
name: my_mariadb_db
type: mysql
host: 127.0.0.1
port: 3306
database: my_db
user: ${MARIADB_USER}
password: ${MARIADB_PASS}
# Optional TLS and other driver parameters. For example, enable preferred TLS:
# queryParams:
# tls: preferred
queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
@@ -68,7 +68,7 @@ Use environment variables instead of committing credentials to source files.

| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind | string | true | Must be `mysql`. |
| type | string | true | Must be `mysql`. |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3307"). |
| database | string | true | Name of the MariaDB database to connect to (e.g. "my_db"). |
@@ -125,15 +125,15 @@ can omit the password field.

## Example

```yaml
sources:
my-mindsdb-source:
kind: mindsdb
host: 127.0.0.1
port: 3306
database: my_db
user: ${USER_NAME}
password: ${PASSWORD} # Optional: omit if MindsDB is configured without authentication
queryTimeout: 30s # Optional: query timeout duration
kind: sources
name: my-mindsdb-source
type: mindsdb
host: 127.0.0.1
port: 3306
database: my_db
user: ${USER_NAME}
password: ${PASSWORD} # Optional: omit if MindsDB is configured without authentication
queryTimeout: 30s # Optional: query timeout duration
```

### Working Configuration Example
@@ -141,13 +141,13 @@ sources:
Here's a working configuration that has been tested:

```yaml
sources:
my-pg-source:
kind: mindsdb
host: 127.0.0.1
port: 47335
database: files
user: mindsdb
kind: sources
name: my-pg-source
type: mindsdb
host: 127.0.0.1
port: 47335
database: files
user: mindsdb
```

{{< notice tip >}}
@@ -176,7 +176,7 @@ With MindsDB integration, you can:

| **field** | **type** | **required** | **description** |
|--------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "mindsdb". |
| type | string | true | Must be "mindsdb". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3306"). |
| database | string | true | Name of the MindsDB database to connect to (e.g. "my_db"). |
@@ -17,10 +17,10 @@ flexible, JSON-like documents, making it easy to develop and scale applications.

## Example

```yaml
sources:
my-mongodb:
kind: mongodb
uri: "mongodb+srv://username:password@host.mongodb.net"
kind: sources
name: my-mongodb
type: mongodb
uri: "mongodb+srv://username:password@host.mongodb.net"

```

@@ -28,5 +28,5 @@ sources:

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------|
| kind | string | true | Must be "mongodb". |
| type | string | true | Must be "mongodb". |
| uri | string | true | connection string to connect to MongoDB |
@@ -39,15 +39,15 @@ SQL Server user][mssql-users] to login to the database with.

## Example

```yaml
sources:
my-mssql-source:
kind: mssql
host: 127.0.0.1
port: 1433
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# encrypt: strict
kind: sources
name: my-mssql-source
type: mssql
host: 127.0.0.1
port: 1433
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# encrypt: strict
```

{{< notice tip >}}
@@ -59,7 +59,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-----------------------------------------------------------------|
| kind | string | true | Must be "mssql". |
| type | string | true | Must be "mssql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "1433"). |
| database | string | true | Name of the SQL Server database to connect to (e.g. "my_db"). |
@@ -49,18 +49,18 @@ MySQL user][mysql-users] to login to the database with.

## Example

```yaml
sources:
my-mysql-source:
kind: mysql
host: 127.0.0.1
port: 3306
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# Optional TLS and other driver parameters. For example, enable preferred TLS:
# queryParams:
# tls: preferred
queryTimeout: 30s # Optional: query timeout duration
kind: sources
name: my-mysql-source
type: mysql
host: 127.0.0.1
port: 3306
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
# Optional TLS and other driver parameters. For example, enable preferred TLS:
# queryParams:
# tls: preferred
queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
@@ -72,7 +72,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "mysql". |
| type | string | true | Must be "mysql". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3306"). |
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
@@ -33,13 +33,13 @@ user if available.

## Example

```yaml
sources:
my-neo4j-source:
kind: neo4j
uri: neo4j+s://xxxx.databases.neo4j.io:7687
user: ${USER_NAME}
password: ${PASSWORD}
database: "neo4j"
kind: sources
name: my-neo4j-source
type: neo4j
uri: neo4j+s://xxxx.databases.neo4j.io:7687
user: ${USER_NAME}
password: ${PASSWORD}
database: "neo4j"
```

{{< notice tip >}}
@@ -51,7 +51,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|----------------------------------------------------------------------|
| kind | string | true | Must be "neo4j". |
| type | string | true | Must be "neo4j". |
| uri | string | true | Connect URI ("bolt://localhost", "neo4j+s://xxx.databases.neo4j.io") |
| user | string | true | Name of the Neo4j user to connect as (e.g. "neo4j"). |
| password | string | true | Password of the Neo4j user (e.g. "my-password"). |
@@ -33,15 +33,15 @@ with SSL).

## Example

```yaml
sources:
my-oceanbase-source:
kind: oceanbase
host: 127.0.0.1
port: 2881
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
queryTimeout: 30s # Optional: query timeout duration
kind: sources
name: my-oceanbase-source
type: oceanbase
host: 127.0.0.1
port: 2881
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
@@ -53,7 +53,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: |-------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oceanbase". |
| type | string | true | Must be "oceanbase". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "2881"). |
| database | string | true | Name of the OceanBase database to connect to (e.g. "my_db"). |
@@ -90,27 +90,27 @@ using a TNS (Transparent Network Substrate) alias.
This example demonstrates the four connection methods you could choose from:

```yaml
sources:
my-oracle-source:
kind: oracle

# --- Choose one connection method ---
# 1. Host, Port, and Service Name
host: 127.0.0.1
port: 1521
serviceName: XEPDB1
kind: sources
name: my-oracle-source
type: oracle

# 2. Direct Connection String
connectionString: "127.0.0.1:1521/XEPDB1"
# --- Choose one connection method ---
# 1. Host, Port, and Service Name
host: 127.0.0.1
port: 1521
serviceName: XEPDB1

# 3. TNS Alias (requires tnsnames.ora)
tnsAlias: "MY_DB_ALIAS"
tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var
# 2. Direct Connection String
connectionString: "127.0.0.1:1521/XEPDB1"

user: ${USER_NAME}
password: ${PASSWORD}
# 3. TNS Alias (requires tnsnames.ora)
tnsAlias: "MY_DB_ALIAS"
tnsAdmin: "/opt/oracle/network/admin" # Optional: overrides TNS_ADMIN env var

# Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client)
user: ${USER_NAME}
password: ${PASSWORD}

# Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client)
```
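For readability, here is a hedged, untangled sketch of the new-style configuration using connection method 1 only (host, port, and service name); it simply restates the placeholder values shown in the example above.

```yaml
# Sketch only: same placeholder values as the example above, new kind/name/type style.
kind: sources
name: my-oracle-source
type: oracle
# 1. Host, Port, and Service Name
host: 127.0.0.1
port: 1521
serviceName: XEPDB1
user: ${USER_NAME}
password: ${PASSWORD}
```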
### Using an Oracle Wallet
@@ -122,15 +122,15 @@ Oracle Wallet allows you to store credentials used for database connection. Depe
The `go-ora` driver uses the `walletLocation` field to connect to a database secured with an Oracle Wallet without standard username and password.

```yaml
sources:
pure-go-wallet:
kind: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
# The TNS Alias is often required to connect to a service registered in tnsnames.ora
tnsAlias: "SECURE_DB_ALIAS"
walletLocation: "/path/to/my/wallet/directory"
kind: sources
name: pure-go-wallet
type: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
# The TNS Alias is often required to connect to a service registered in tnsnames.ora
tnsAlias: "SECURE_DB_ALIAS"
walletLocation: "/path/to/my/wallet/directory"
```

#### OCI-Based Driver (`useOCI: true`) - Oracle Wallet
@@ -138,15 +138,15 @@ sources:
For the OCI-based driver, wallet authentication is triggered by setting tnsAdmin to the wallet directory and connecting via a tnsAlias.

```yaml
sources:
oci-wallet:
kind: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
tnsAlias: "WALLET_DB_ALIAS"
tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files
useOCI: true
kind: sources
name: oci-wallet
type: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
tnsAlias: "WALLET_DB_ALIAS"
tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files
useOCI: true
```

{{< notice tip >}}
@@ -158,7 +158,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oracle". |
| type | string | true | Must be "oracle". |
| user | string | true | Name of the Oracle user to connect as (e.g. "my-oracle-user"). |
| password | string | true | Password of the Oracle user (e.g. "my-password"). |
| host | string | false | IP address or hostname to connect to (e.g. "127.0.0.1"). Required if not using `connectionString` or `tnsAlias`. |
@@ -85,7 +85,10 @@ reputation for reliability, feature robustness, and performance.
server.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in PostgreSQL database..
Lists all the user-created roles in PostgreSQL database.

- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedure in PostgreSQL database.

### Pre-built Configurations

@@ -104,14 +107,14 @@ PostgreSQL user][pg-users] to login to the database with.

## Example

```yaml
sources:
my-pg-source:
kind: postgres
host: 127.0.0.1
port: 5432
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
kind: sources
name: my-pg-source
type: postgres
host: 127.0.0.1
port: 5432
database: my_db
user: ${USER_NAME}
password: ${PASSWORD}
```

{{< notice tip >}}
@@ -123,7 +126,7 @@ instead of hardcoding your secrets into the configuration file.

| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "postgres". |
| type | string | true | Must be "postgres". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "5432") |
| database | string | true | Name of the Postgres database to connect to (e.g. "my_db"). |
@@ -34,16 +34,16 @@ connections must authenticate in order to connect.
Specify your AUTH string in the password field:

```yaml
sources:
my-redis-instance:
kind: redis
address:
- 127.0.0.1:6379
username: ${MY_USER_NAME}
password: ${MY_AUTH_STRING} # Omit this field if you don't have a password.
# database: 0
# clusterEnabled: false
# useGCPIAM: false
kind: sources
name: my-redis-instance
type: redis
address:
- 127.0.0.1:6379
username: ${MY_USER_NAME}
password: ${MY_AUTH_STRING} # Omit this field if you don't have a password.
# database: 0
# clusterEnabled: false
# useGCPIAM: false
```

{{< notice tip >}}
@@ -59,14 +59,14 @@ string.
Here is an example tools.yaml config with [AUTH][auth] enabled:

```yaml
sources:
my-redis-cluster-instance:
kind: memorystore-redis
address:
- 127.0.0.1:6379
password: ${MY_AUTH_STRING}
# useGCPIAM: false
# clusterEnabled: false
kind: sources
name: my-redis-cluster-instance
type: memorystore-redis
address:
- 127.0.0.1:6379
password: ${MY_AUTH_STRING}
# useGCPIAM: false
# clusterEnabled: false
```

Memorystore Redis Cluster supports IAM authentication instead. Grant your
@@ -76,13 +76,13 @@ Here is an example tools.yaml config for Memorystore Redis Cluster instances
using IAM authentication:

```yaml
sources:
my-redis-cluster-instance:
kind: memorystore-redis
address:
- 127.0.0.1:6379
useGCPIAM: true
clusterEnabled: true
kind: sources
name: my-redis-cluster-instance
type: memorystore-redis
address:
- 127.0.0.1:6379
useGCPIAM: true
clusterEnabled: true
```

[iam]: https://cloud.google.com/memorystore/docs/cluster/about-iam-auth
@@ -91,7 +91,7 @@ sources:

| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "memorystore-redis". |
| type | string | true | Must be "memorystore-redis". |
| address | string | true | Primary endpoint for the Memorystore Redis instance to connect to. |
| username | string | false | If you are using a non-default user, specify the user name here. If you are using Memorystore for Redis, leave this field blank |
| password | string | false | If you have [Redis AUTH][auth] enabled, specify the AUTH string here |
@@ -49,17 +49,17 @@ set up your ADC.

## Example

```yaml
sources:
my-serverless-spark-source:
kind: serverless-spark
project: my-project-id
location: us-central1
kind: sources
name: my-serverless-spark-source
type: serverless-spark
project: my-project-id
location: us-central1
```

## Reference

| **field** | **type** | **required** | **description** |
| --------- | :------: | :----------: | ----------------------------------------------------------------- |
| kind | string | true | Must be "serverless-spark". |
| type | string | true | Must be "serverless-spark". |
| project | string | true | ID of the GCP project with Serverless for Apache Spark resources. |
| location | string | true | Location containing Serverless for Apache Spark resources. |