Mirror of https://github.com/googleapis/genai-toolbox.git, synced 2026-01-11 16:38:15 -05:00.

Comparing 1 commit (branch cloud-sql-…, commit 170b54b5b9):
@@ -328,23 +328,23 @@ steps:
          mssql \
          mssql

  # - id: "dgraph"
  #   name: golang:1
  #   waitFor: ["compile-test-binary"]
  #   entrypoint: /bin/bash
  #   env:
  #     - "GOPATH=/gopath"
  #     - "DGRAPH_URL=$_DGRAPHURL"
  #   volumes:
  #     - name: "go"
  #       path: "/gopath"
  #   args:
  #     - -c
  #     - |
  #       .ci/test_with_coverage.sh \
  #         "Dgraph" \
  #         dgraph \
  #         dgraph
  - id: "dgraph"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
      - "DGRAPH_URL=$_DGRAPHURL"
    volumes:
      - name: "go"
        path: "/gopath"
    args:
      - -c
      - |
        .ci/test_with_coverage.sh \
          "Dgraph" \
          dgraph \
          dgraph

  - id: "http"
    name: golang:1
@@ -531,24 +531,6 @@ steps:
          utility \
          utility/alloydbwaitforoperation

  - id: "cloud-sql"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
    secretEnv: ["CLIENT_ID"]
    volumes:
      - name: "go"
        path: "/gopath"
    args:
      - -c
      - |
        .ci/test_with_coverage.sh \
          "Cloud SQL Wait for Operation" \
          cloudsql \
          cloudsql

  - id: "tidb"
    name: golang:1
    waitFor: ["compile-test-binary"]
@@ -638,25 +620,6 @@ steps:
          trino \
          trinosql trinoexecutesql

  - id: "yugabytedb"
    name: golang:1
    waitFor: ["compile-test-binary"]
    entrypoint: /bin/bash
    env:
      - "GOPATH=/gopath"
      - "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
      - "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
      - "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
    secretEnv: ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
    volumes:
      - name: "go"
        path: "/gopath"
    args:
      - -c
      - |
        ./yugabytedb.test -test.v

availableSecrets:
  secretManager:
    - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -735,13 +698,6 @@ availableSecrets:
      env: OCEANBASE_USER
    - versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
      env: OCEANBASE_PASSWORD
    - versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
      env: YUGABYTEDB_HOST
    - versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
      env: YUGABYTEDB_USER
    - versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
      env: YUGABYTEDB_PASS

options:
  logging: CLOUD_LOGGING_ONLY
@@ -788,6 +744,3 @@ substitutions:
  _TRINO_SCHEMA: "default"
  _OCEANBASE_PORT: "2883"
  _OCEANBASE_DATABASE: "oceanbase"
  _YUGABYTEDB_DATABASE: "yugabyte"
  _YUGABYTEDB_PORT: "5433"
  _YUGABYTEDB_LOADBALANCE: "false"
.github/PULL_REQUEST_TEMPLATE.md (vendored): 7 changes

@@ -1,19 +1,16 @@
## Description

---
> Should include a concise description of the changes (bug or feature), its
> impact, along with a summary of the solution

## PR Checklist

---
> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:

- [ ] Make sure you reviewed
  [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
  [bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
  [bug/issue](https://github.com/googleapis/langchain-google-alloydb-pg-python/issues/new/choose)
  before writing your code! That way we can discuss the change, evaluate
  designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
@@ -21,4 +18,4 @@
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change

🛠️ Fixes #<issue_number_goes_here>
🛠️ Fixes #<issue_number_goes_here>
.github/blunderbuss.yml (vendored): 17 changes

@@ -1,24 +1,7 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

assign_issues:
  - Yuan325
  - duwenxin99
  - averikitsch
  - anubhav756
  - dishaprakash
  - twishabansal
assign_issues_by:
  - labels:
      - 'product: bigquery'
.github/release-please.yml (vendored): 18 changes

@@ -20,20 +20,26 @@ extraFiles: [
  "README.md",
  "docs/en/getting-started/colab_quickstart.ipynb",
  "docs/en/getting-started/introduction/_index.md",
  "docs/en/getting-started/local_quickstart.md",
  "docs/en/getting-started/local_quickstart_js.md",
  "docs/en/getting-started/local_quickstart_go.md",
  "docs/en/getting-started/mcp_quickstart/_index.md",
  "docs/en/getting-started/quickstart/shared/configure_toolbox.md",
  "docs/en/samples/alloydb/_index.md",
  "docs/en/samples/alloydb/mcp_quickstart.md",
  "docs/en/samples/alloydb/ai-nl/alloydb_ai_nl.ipynb",
  "docs/en/samples/bigquery/local_quickstart.md",
  "docs/en/samples/bigquery/mcp_quickstart/_index.md",
  "docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb",
  "docs/en/samples/looker/looker_gemini.md",
  "docs/en/samples/looker/looker_gemini_oauth/_index.md",
  "docs/en/samples/looker/looker_mcp_inspector/_index.md",
  "docs/en/samples/looker/looker_mcp_inspector.md",
  "docs/en/how-to/connect-ide/alloydb_pg_mcp.md",
  "docs/en/how-to/connect-ide/alloydb_pg_admin_mcp.md",
  "docs/en/how-to/connect-ide/bigquery_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_pg_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mssql_mcp.md",
  "docs/en/how-to/connect-ide/cloud_sql_mysql_mcp.md",
  "docs/en/how-to/connect-ide/firestore_mcp.md",
  "docs/en/how-to/connect-ide/looker_mcp.md",
  "docs/en/how-to/connect-ide/mysql_mcp.md",
  "docs/en/how-to/connect-ide/mssql_mcp.md",
  "docs/en/how-to/connect-ide/postgres_mcp.md",
  "docs/en/how-to/connect-ide/neo4j_mcp.md",
  "docs/en/how-to/connect-ide/spanner_mcp.md",
]
@@ -37,7 +37,7 @@ jobs:
    runs-on: 'ubuntu-latest'

    steps:
      - uses: 'actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b' # v7
      - uses: 'actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea' # v7
        with:
          script: |-
            // parse test names
.github/workflows/docs_preview_clean.yaml (vendored): 2 changes

@@ -48,7 +48,7 @@ jobs:
          git push

      - name: Comment
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
.github/workflows/docs_preview_deploy.yaml (vendored): 21 changes

@@ -49,6 +49,23 @@ jobs:
      group: "preview-${{ github.event.number }}"
      cancel-in-progress: true
    steps:
      - name: Remove PR label
        if: "${{ github.event.action == 'labeled' && github.event.label.name == 'docs: deploy-preview' }}"
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            try {
              await github.rest.issues.removeLabel({
                name: 'docs: deploy-preview',
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.pull_request.number
              });
            } catch (e) {
              console.log('Failed to remove label. Another job may have already removed it!');
            }

      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          # Checkout the PR's HEAD commit (supports forks).
@@ -90,7 +107,7 @@ jobs:
          commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"

      - name: Comment
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            github.rest.issues.createComment({
@@ -98,4 +115,4 @@ jobs:
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: "🔎 Preview at https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/"
            })
            })
.github/workflows/lint.yaml (vendored): 2 changes

@@ -36,7 +36,7 @@ jobs:
    steps:
      - name: Remove PR Label
        if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
.github/workflows/tests.yaml (vendored): 2 changes

@@ -41,7 +41,7 @@ jobs:
    steps:
      - name: Remove PR label
        if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
CHANGELOG.md: 28 changes

@@ -1,33 +1,5 @@
# Changelog

## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)

### ⚠ BREAKING CHANGES

* **bigquery:** Move `useClientOAuth` config from tool to source ([#1279](https://github.com/googleapis/genai-toolbox/issues/1279)) ([8d20a48](https://github.com/googleapis/genai-toolbox/commit/8d20a48f13bcda853d41bdf80a162de12b076d1b))
* **tools/bigquerysql:** remove `useClientOAuth` from tools config ([#1312](https://github.com/googleapis/genai-toolbox/issues/1312))

### Features

* **clickhouse:** Add ClickHouse Source and Tools ([#1088](https://github.com/googleapis/genai-toolbox/issues/1088)) ([75a04a5](https://github.com/googleapis/genai-toolbox/commit/75a04a55dd2259bed72fe95119a7a51a906c0b21))
* **prebuilt/alloydb-postgres:** Support ipType and IAM users ([#1324](https://github.com/googleapis/genai-toolbox/issues/1324)) ([0b2121e](https://github.com/googleapis/genai-toolbox/commit/0b2121ea72eb81348dcd9c740a62ccd32e71fe37))
* **server/mcp:** Support toolbox auth in mcp ([#1140](https://github.com/googleapis/genai-toolbox/issues/1140)) ([ca353e0](https://github.com/googleapis/genai-toolbox/commit/ca353e0b66fedc00e9110df57db18632aef49018))
* **source/mysql:** Support `queryParams` in MySQL source ([#1299](https://github.com/googleapis/genai-toolbox/issues/1299)) ([3ae2526](https://github.com/googleapis/genai-toolbox/commit/3ae2526e0fe36b57b05a9b54f1d99f3fc68d9657))
* **tools/bigquery:** Support end-user credential passthrough on multiple BQ tools ([#1314](https://github.com/googleapis/genai-toolbox/issues/1314)) ([88f4b30](https://github.com/googleapis/genai-toolbox/commit/88f4b3028df3b6a400936cdf8a035bf55021924c))
* **tools/looker:** Add description for looker-get-models tool ([#1266](https://github.com/googleapis/genai-toolbox/issues/1266)) ([89af3a4](https://github.com/googleapis/genai-toolbox/commit/89af3a4ca332f029615b2a739d1f6cd50519638d))
* **tools/looker:** Authenticate via end user credentials ([#1257](https://github.com/googleapis/genai-toolbox/issues/1257)) ([8755e3d](https://github.com/googleapis/genai-toolbox/commit/8755e3db3476abb35629b3cca9c78db7366757a4))
* **tools/looker:** Report field suggestions to agent ([#1267](https://github.com/googleapis/genai-toolbox/issues/1267)) ([2cad82e](https://github.com/googleapis/genai-toolbox/commit/2cad82e5107566dd6c9b75e34e9976af63af0bb5))

### Bug Fixes

* Do not print usage on runtime error ([#1315](https://github.com/googleapis/genai-toolbox/issues/1315)) ([afba7a5](https://github.com/googleapis/genai-toolbox/commit/afba7a57cdd4fe7c1b0741dbf8f8c78b14a68089))
* Update env var to allow empty string ([#1260](https://github.com/googleapis/genai-toolbox/issues/1260)) ([03aa9fa](https://github.com/googleapis/genai-toolbox/commit/03aa9fabacda06f860c9f178485126bddb7d5782))
* **tools/firestore:** Add document/collection path validation ([#1229](https://github.com/googleapis/genai-toolbox/issues/1229)) ([14c2249](https://github.com/googleapis/genai-toolbox/commit/14c224939a2f9bb349fa00a7d5227877198530c2))
* **tools/looker-get-dashboards:** Fix Looker client OAuth check ([#1338](https://github.com/googleapis/genai-toolbox/issues/1338)) ([36225aa](https://github.com/googleapis/genai-toolbox/commit/36225aa6db7f8426ad87930866530fde4e9bf0cd))
* **tools/oceanbase:** Fix encoded text with mysql driver ([#1283](https://github.com/googleapis/genai-toolbox/issues/1283)) ([d16f89f](https://github.com/googleapis/genai-toolbox/commit/d16f89fbb6e49c03998f114ef7dc2b584b5e4967)), closes [#1161](https://github.com/googleapis/genai-toolbox/issues/1161)

## [0.13.0](https://github.com/googleapis/genai-toolbox/compare/v0.12.0...v0.13.0) (2025-08-27)

### ⚠ BREAKING CHANGES
@@ -25,42 +25,33 @@ This project follows

## Contribution process

> [!NOTE]
> New contributions should always include both unit and integration tests.

### Code reviews

All submissions, including submissions by project members, require review. We
use GitHub pull requests for this purpose. Consult
[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
information on using pull requests.

### Code reviews

* Within 2-5 days, a reviewer will review your PR. They may approve it, or request
  changes.
* When requesting changes, reviewers should self-assign the PR to ensure
Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes. When requesting changes, reviewers should self-assign the PR to ensure
  they are aware of any updates.
* If additional changes are needed, push additional commits to your PR branch -
  this helps the reviewer know which parts of the PR have changed.
* Commits will be
If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed. Commits will be
  squashed when merged.
* Please follow up with changes promptly.
* If a PR is awaiting changes by the
Please follow up with changes promptly. If a PR is awaiting changes by the
  author for more than 10 days, maintainers may mark that PR as Draft. PRs that
  are inactive for more than 30 days may be closed.

## Adding a New Database Source or Tool
### Adding a New Database Source and Tool

Please create an
We recommend creating an
[issue](https://github.com/googleapis/genai-toolbox/issues) before
implementation to ensure we can accept the contribution and no duplicated work. This issue
should include an overview of the API design. If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team.
implementation to ensure we can accept the contribution and no duplicated work.
If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team. New
contributions should be added with both unit tests and integration tests.

> [!NOTE]
> New tools can be added for [pre-existing data sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources). However, any new database source should also include at least one new tool type.

### Adding a New Database Source
#### 1. Implement the New Data Source

We recommend looking at an [example source
implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/sources/postgres/postgres.go).
@@ -87,7 +78,7 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
* **Implement `init()`** to register the new Source.
* **Implement Unit Tests** in a file named `newdb_test.go`.
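
For illustration, a minimal sketch of what an `init()`-based source registration typically looks like — the `newdb` package, `SourceKind` value, `Config` fields, and `registry` are hypothetical placeholders, not the project's actual API (see the example source implementation linked above for the real one):

```go
package newdb

import "fmt"

// SourceKind is the `kind` value users would write in tools.yaml (hypothetical).
const SourceKind = "newdb"

// Config holds the source's YAML settings (hypothetical fields).
type Config struct {
  Name string `yaml:"name"`
  Host string `yaml:"host"`
  Port int    `yaml:"port"`
}

// registry stands in for the project's real source registry.
var registry = map[string]func() *Config{}

func init() {
  // Registering here is what makes a blank import of the package
  // (as in cmd/root.go) enough to enable the source.
  if _, dup := registry[SourceKind]; dup {
    panic(fmt.Sprintf("source kind %q registered twice", SourceKind))
  }
  registry[SourceKind] = func() *Config { return &Config{} }
}
```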

### Adding a New Tool
#### 2. Implement the New Tool

We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).
@@ -120,7 +111,7 @@ tools.
* **Implement `init()`** to register the new Tool.
* **Implement Unit Tests** in a file named `newdb_test.go`.
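
The tool side follows the same registration pattern; a short sketch, again with hypothetical names rather than the project's actual types:

```go
package newdbsql

// ToolKind is the `kind` value for the tool in tools.yaml (hypothetical).
const ToolKind = "newdb-sql"

// Config mirrors a tool entry in tools.yaml (hypothetical fields).
type Config struct {
  Name        string `yaml:"name"`
  Source      string `yaml:"source"`      // which source the tool runs against
  Description string `yaml:"description"` // description surfaced to the LLM
  Statement   string `yaml:"statement"`   // statement the tool executes
}

// toolRegistry stands in for the project's real tool registry.
var toolRegistry = map[string]func() *Config{}

func init() {
  toolRegistry[ToolKind] = func() *Config { return &Config{} }
}
```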

### Adding Integration Tests
#### 3. Add Integration Tests

* **Add a test file** under a new directory `tests/newdb`.
* **Add pre-defined integration test suites** in the
@@ -162,7 +153,7 @@ tools.
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
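
A skeleton of such a `tests/newdb` test file might look like the following; the file name, environment variable, and skip logic are illustrative rather than the repository's exact conventions, and the pre-defined suites mentioned above would be invoked where the comment indicates:

```go
package newdb

import (
  "os"
  "testing"
)

var newDbHost = os.Getenv("NEWDB_HOST") // hypothetical env var for the test database

func TestNewDbToolEndpoints(t *testing.T) {
  if newDbHost == "" {
    t.Skip("NEWDB_HOST not set; skipping newdb integration test")
  }
  // Start the Toolbox server against a tools.yaml that points at the test
  // database, then run the pre-defined integration test suites here.
}
```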

### Adding Documentation
#### 4. Add Documentation

* **Update the documentation** to include information about your new data source
  and tool. This includes:
@@ -172,7 +163,7 @@ tools.

* **(Optional) Add samples** to the `docs/en/samples/<newdb>` directory.

### (Optional) Adding Prebuilt Tools
#### (Optional) 5. Add Prebuilt Tools

You can provide developers with a set of "build-time" tools to aid common
software development user journeys like viewing and creating tables/collections
@@ -186,7 +177,7 @@ and data.
[internal/prebuiltconfigs/prebuiltconfigs_test.go](internal/prebuiltconfigs/prebuiltconfigs_test.go)
and [cmd/root_test.go](cmd/root_test.go).
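
For illustration, a test exercising a new prebuilt config could mirror the `prebuiltconfigs.Get` calls that appear in `cmd/root_test.go` (shown further down this page); the `"newdb"` config name and the assertions are placeholders:

```go
package prebuiltconfigs_test

import (
  "testing"

  "github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
)

func TestNewDbPrebuiltConfigLoads(t *testing.T) {
  // Get loads the prebuilt configuration by name, as used in cmd/root_test.go.
  cfg, err := prebuiltconfigs.Get("newdb") // hypothetical config name
  if err != nil {
    t.Fatalf("expected a prebuilt config for newdb: %v", err)
  }
  _ = cfg // parse the returned config and assert on its toolsets here
}
```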

## Submitting a Pull Request
#### 6. Submit a Pull Request

Submit a pull request to the repository with your changes. Be sure to include a
detailed description of your changes and any requests for long term testing
@@ -227,4 +218,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
  include a concise description of the changes, its impact, along with a
  summary of the solution. If the PR is related to a specific issue, the issue
  number should be mentioned in the PR description (e.g. `Fixes #1`).
  number should be mentioned in the PR description (e.g. `Fixes #1`).
README.md: 61 changes

@@ -117,7 +117,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -130,7 +130,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -154,7 +154,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```
<!-- {x-release-please-end} -->

@@ -165,66 +165,22 @@ go install github.com/googleapis/genai-toolbox@v0.14.0
[Configure](#configuration) a `tools.yaml` to define your tools, and then
execute `toolbox` to start the server:

<details open>
<summary>Binary</summary>

To run Toolbox from binary:

```sh
./toolbox --tools-file "tools.yaml"
```

ⓘ **NOTE:**
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
> [!NOTE]
> Toolbox enables dynamic reloading by default. To disable, use the
> `--disable-reload` flag.

</details>
#### Homebrew Users

<details>

<summary>Container image</summary>

To run the server after pulling the [container image](#installing-the-server):

```sh
export VERSION=0.11.0 # Use the version you pulled
docker run -p 5000:5000 \
  -v $(pwd)/tools.yaml:/app/tools.yaml \
  us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
  --tools-file "/app/tools.yaml"
```

ⓘ **NOTE:**
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps the container's port `5000` to your host's port `5000`.

</details>

<details>

<summary>Source</summary>

To run the server directly from source, navigate to the project root directory and run:

```sh
go run .
```

ⓘ **NOTE:**
This command runs the project from source, and is more suitable for development and testing. It does **not** compile a binary into your `$GOPATH`. If you want to compile a binary instead, refer to the [Developer Documentation](./DEVELOPER.md#building-the-binary).

</details>

<details>

<summary>Homebrew</summary>

If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox` binary is available in your system path. You can start the server with the same command:
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:

```sh
toolbox --tools-file "tools.yaml"
```

</details>

You can use `toolbox help` for a full list of flags! To stop the server, send a
terminate signal (`ctrl+c` on most platforms).

@@ -232,7 +188,6 @@ For more detailed documentation on deploying to different environments, check
out the resources in the [How-to
section](https://googleapis.github.io/genai-toolbox/how-to/)

### Integrating your application

Once your server is up and running, you can load the tools into your
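
A compact sketch of that loading step using the MCP Toolbox Go SDK shown in the quickstarts further down this page — the server URL, toolset name, and empty argument map are placeholders:

```go
package main

import (
  "context"
  "fmt"
  "log"

  "github.com/googleapis/mcp-toolbox-sdk-go/core"
)

func main() {
  ctx := context.Background()

  // Point the client at your running Toolbox server.
  client, err := core.NewToolboxClient("http://127.0.0.1:5000")
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }

  // Load every tool in a toolset defined in tools.yaml ("my-toolset" is a placeholder).
  tools, err := client.LoadToolset("my-toolset", ctx)
  if err != nil {
    log.Fatalf("Failed to load tools: %v", err)
  }

  // Invoke the first tool with no arguments, just to show the call shape.
  if len(tools) > 0 {
    result, err := tools[0].Invoke(ctx, map[string]any{})
    if err != nil {
      log.Fatalf("Failed to invoke %s: %v", tools[0].Name(), err)
    }
    fmt.Println(result)
  }
}
```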

@@ -54,7 +54,6 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
  _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
  _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
  _ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
  _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
  _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
@@ -67,7 +66,6 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
  _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
@@ -118,7 +116,6 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/tools/utility/alloydbwaitforoperation"
  _ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
  _ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
  _ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"

  "github.com/spf13/cobra"

@@ -148,7 +145,6 @@ import (
  _ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
  _ "github.com/googleapis/genai-toolbox/internal/sources/trino"
  _ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
  _ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
)

var (

@@ -223,9 +219,6 @@ func NewCommand(opts ...Option) *Command {
    o(cmd)
  }

  // Do not print Usage on runtime error
  cmd.SilenceUsage = true

  // Set server version
  cmd.cfg.Version = versionString
@@ -1244,7 +1244,6 @@ func TestPrebuiltTools(t *testing.T) {
  postgresconfig, _ := prebuiltconfigs.Get("postgres")
  spanner_config, _ := prebuiltconfigs.Get("spanner")
  spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
  neo4jconfig, _ := prebuiltconfigs.Get("neo4j")

  // Set environment variables
  t.Setenv("API_KEY", "your_api_key")
@@ -1319,11 +1318,6 @@ func TestPrebuiltTools(t *testing.T) {
  t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
  t.Setenv("LOOKER_VERIFY_SSL", "true")

  t.Setenv("NEO4J_URI", "bolt://localhost:7687")
  t.Setenv("NEO4J_DATABASE", "neo4j")
  t.Setenv("NEO4J_USERNAME", "your_neo4j_user")
  t.Setenv("NEO4J_PASSWORD", "your_neo4j_password")

  ctx, err := testutils.ContextWithNewLogger()
  if err != nil {
    t.Fatalf("unexpected error: %s", err)
@@ -1483,16 +1477,6 @@ func TestPrebuiltTools(t *testing.T) {
        },
      },
    },
    {
      name: "neo4j prebuilt tools",
      in:   neo4jconfig,
      wantToolset: server.ToolsetConfigs{
        "neo4j-database-tools": tools.ToolsetConfig{
          Name:      "neo4j-database-tools",
          ToolNames: []string{"execute_cypher", "get_schema"},
        },
      },
    },
  }

  for _, tc := range tcs {
@@ -1 +1 @@
0.14.0
0.13.0
@@ -234,7 +234,7 @@
    },
    "outputs": [],
    "source": [
      "version = \"0.14.0\" # x-release-please-version\n",
      "version = \"0.13.0\" # x-release-please-version\n",
      "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
      "\n",
      "# Make the binary executable\n",
@@ -86,7 +86,7 @@ To install Toolbox as a binary:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.13.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -115,7 +115,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.13.0
```

{{% /tab %}}
@@ -49,28 +49,614 @@ from Toolbox.
{{< tabpane persist=header >}}
{{< tab header="LangChain Go" lang="go" >}}

{{< include "quickstart/go/langchain/quickstart.go" >}}
package main

import (
  "context"
  "encoding/json"
  "fmt"
  "log"
  "os"

  "github.com/googleapis/mcp-toolbox-sdk-go/core"
  "github.com/tmc/langchaingo/llms"
  "github.com/tmc/langchaingo/llms/googleai"
)

// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {

  // Fetch the tool's input schema
  inputschema, err := toolboxTool.InputSchema()
  if err != nil {
    return llms.Tool{}
  }

  var paramsSchema map[string]any
  _ = json.Unmarshal(inputschema, &paramsSchema)

  // Convert into LangChain's llms.Tool
  return llms.Tool{
    Type: "function",
    Function: &llms.FunctionDefinition{
      Name:        toolboxTool.Name(),
      Description: toolboxTool.Description(),
      Parameters:  paramsSchema,
    },
  }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
  "Find hotels in Basel with Basel in its name.",
  "Can you book the hotel Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it.",
  "Please book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
  genaiKey := os.Getenv("GOOGLE_API_KEY")
  toolboxURL := "http://localhost:5000"
  ctx := context.Background()

  // Initialize the Google AI client (LLM).
  llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
  if err != nil {
    log.Fatalf("Failed to create Google AI client: %v", err)
  }

  // Initialize the MCP Toolbox client.
  toolboxClient, err := core.NewToolboxClient(toolboxURL)
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }

  // Load the tool using the MCP Toolbox SDK.
  tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
  if err != nil {
    log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
  }

  toolsMap := make(map[string]*core.ToolboxTool, len(tools))

  langchainTools := make([]llms.Tool, len(tools))
  // Convert the loaded ToolboxTools into the format LangChainGo requires.
  for i, tool := range tools {
    langchainTools[i] = ConvertToLangchainTool(tool)
    toolsMap[tool.Name()] = tool
  }

  // Start the conversation history.
  messageHistory := []llms.MessageContent{
    llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
  }

  for _, query := range queries {
    messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))

    // Make the first call to the LLM, making it aware of the tool.
    resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
    if err != nil {
      log.Fatalf("LLM call failed: %v", err)
    }
    respChoice := resp.Choices[0]

    assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
    for _, tc := range respChoice.ToolCalls {
      assistantResponse.Parts = append(assistantResponse.Parts, tc)
    }
    messageHistory = append(messageHistory, assistantResponse)

    // Process each tool call requested by the model.
    for _, tc := range respChoice.ToolCalls {
      toolName := tc.FunctionCall.Name
      tool := toolsMap[toolName]
      var args map[string]any
      if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
        log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
      }
      toolResult, err := tool.Invoke(ctx, args)
      if err != nil {
        log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
      }
      if toolResult == "" || toolResult == nil {
        toolResult = "Operation completed successfully with no specific return value."
      }

      // Create the tool call response message and add it to the history.
      toolResponse := llms.MessageContent{
        Role: llms.ChatMessageTypeTool,
        Parts: []llms.ContentPart{
          llms.ToolCallResponse{
            Name:    toolName,
            Content: fmt.Sprintf("%v", toolResult),
          },
        },
      }
      messageHistory = append(messageHistory, toolResponse)
    }
    finalResp, err := llm.GenerateContent(ctx, messageHistory)
    if err != nil {
      log.Fatalf("Final LLM call failed after tool execution: %v", err)
    }

    // Add the final textual response from the LLM to the history
    messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))

    fmt.Println(finalResp.Choices[0].Content)

  }

}

{{< /tab >}}
{{< tab header="Genkit Go" lang="go" >}}

{{< include "quickstart/go/genkit/quickstart.go" >}}
package main

import (
  "context"
  "fmt"
  "log"

  "github.com/googleapis/mcp-toolbox-sdk-go/core"
  "github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"

  "github.com/firebase/genkit/go/ai"
  "github.com/firebase/genkit/go/genkit"
  "github.com/firebase/genkit/go/plugins/googlegenai"
)

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
  "Find hotels in Basel with Basel in its name.",
  "Can you book the hotel Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
  ctx := context.Background()

  // Create Toolbox Client
  toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }

  // Load the tools using the MCP Toolbox SDK.
  tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
  if err != nil {
    log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
  }

  // Initialize Genkit
  g, err := genkit.Init(ctx,
    genkit.WithPlugins(&googlegenai.GoogleAI{}),
    genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
  )
  if err != nil {
    log.Fatalf("Failed to init genkit: %v\n", err)
  }

  // Create a conversation history
  conversationHistory := []*ai.Message{
    ai.NewSystemTextMessage(systemPrompt),
  }

  // Convert your tool to a Genkit tool.
  genkitTools := make([]ai.Tool, len(tools))
  for i, tool := range tools {
    newTool, err := tbgenkit.ToGenkitTool(tool, g)
    if err != nil {
      log.Fatalf("Failed to convert tool: %v\n", err)
    }
    genkitTools[i] = newTool
  }

  toolRefs := make([]ai.ToolRef, len(genkitTools))

  for i, tool := range genkitTools {
    toolRefs[i] = tool
  }

  for _, query := range queries {
    conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
    response, err := genkit.Generate(ctx, g,
      ai.WithMessages(conversationHistory...),
      ai.WithTools(toolRefs...),
      ai.WithReturnToolRequests(true),
    )

    if err != nil {
      log.Fatalf("%v\n", err)
    }
    conversationHistory = append(conversationHistory, response.Message)

    parts := []*ai.Part{}

    for _, req := range response.ToolRequests() {
      tool := genkit.LookupTool(g, req.Name)
      if tool == nil {
        log.Fatalf("tool %q not found", req.Name)
      }

      output, err := tool.RunRaw(ctx, req.Input)
      if err != nil {
        log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
      }

      parts = append(parts,
        ai.NewToolResponsePart(&ai.ToolResponse{
          Name:   req.Name,
          Ref:    req.Ref,
          Output: output,
        }))

    }

    if len(parts) > 0 {
      resp, err := genkit.Generate(ctx, g,
        ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
        ai.WithTools(toolRefs...),
      )
      if err != nil {
        log.Fatal(err)
      }
      fmt.Println("\n", resp.Text())
      conversationHistory = append(conversationHistory, resp.Message)
    } else {
      fmt.Println("\n", response.Text())
    }

  }

}

{{< /tab >}}
{{< tab header="Go GenAI" lang="go" >}}

{{< include "quickstart/go/genAI/quickstart.go" >}}
package main

import (
  "context"
  "encoding/json"
  "fmt"
  "log"
  "os"

  "github.com/googleapis/mcp-toolbox-sdk-go/core"
  "google.golang.org/genai"
)

// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {

  inputschema, err := toolboxTool.InputSchema()
  if err != nil {
    return &genai.Tool{}
  }

  var paramsSchema *genai.Schema
  _ = json.Unmarshal(inputschema, &paramsSchema)
  // First, create the function declaration.
  funcDeclaration := &genai.FunctionDeclaration{
    Name:        toolboxTool.Name(),
    Description: toolboxTool.Description(),
    Parameters:  paramsSchema,
  }

  // Then, wrap the function declaration in a genai.Tool struct.
  return &genai.Tool{
    FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
  }
}

func printResponse(resp *genai.GenerateContentResponse) {
  for _, cand := range resp.Candidates {
    if cand.Content != nil {
      for _, part := range cand.Content.Parts {
        fmt.Println(part.Text)
      }
    }
  }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
  "Find hotels in Basel with Basel in its name.",
  "Can you book the hotel Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it.",
  "Please book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
  // Setup
  ctx := context.Background()
  apiKey := os.Getenv("GOOGLE_API_KEY")
  toolboxURL := "http://localhost:5000"

  // Initialize the Google GenAI client using the explicit ClientConfig.
  client, err := genai.NewClient(ctx, &genai.ClientConfig{
    APIKey: apiKey,
  })
  if err != nil {
    log.Fatalf("Failed to create Google GenAI client: %v", err)
  }

  // Initialize the MCP Toolbox client.
  toolboxClient, err := core.NewToolboxClient(toolboxURL)
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }

  // Load the tool using the MCP Toolbox SDK.
  tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
  if err != nil {
    log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
  }

  genAITools := make([]*genai.Tool, len(tools))
  toolsMap := make(map[string]*core.ToolboxTool, len(tools))

  for i, tool := range tools {
    genAITools[i] = ConvertToGenaiTool(tool)
    toolsMap[tool.Name()] = tool
  }

  // Set up the generative model with the available tool.
  modelName := "gemini-2.0-flash"

  // Create the initial content prompt for the model.
  messageHistory := []*genai.Content{
    genai.NewContentFromText(systemPrompt, genai.RoleUser),
  }
  config := &genai.GenerateContentConfig{
    Tools: genAITools,
    ToolConfig: &genai.ToolConfig{
      FunctionCallingConfig: &genai.FunctionCallingConfig{
        Mode: genai.FunctionCallingConfigModeAny,
      },
    },
  }

  for _, query := range queries {

    messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))

    genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
    if err != nil {
      log.Fatalf("LLM call failed for query '%s': %v", query, err)
    }

    if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
      messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
    }

    functionCalls := genContentResp.FunctionCalls()

    toolResponseParts := []*genai.Part{}

    for _, fc := range functionCalls {

      toolToInvoke, found := toolsMap[fc.Name]
      if !found {
        log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
      }

      toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
      if invokeErr != nil {
        log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
      }

      // Enhanced Tool Result Handling (retained to prevent nil issues)
      toolResultString := ""
      if toolResult != nil {
        jsonBytes, marshalErr := json.Marshal(toolResult)
        if marshalErr == nil {
          toolResultString = string(jsonBytes)
        } else {
          toolResultString = fmt.Sprintf("%v", toolResult)
        }
      }

      responseMap := map[string]any{"result": toolResultString}

      toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
    }
    // Add all accumulated tool responses for this turn to the message history.
    toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
    messageHistory = append(messageHistory, toolResponseContent)

    finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
    if err != nil {
      log.Fatalf("Error calling GenerateContent (with function result): %v", err)
    }

    printResponse(finalResponse)
    // Add the final textual response from the LLM to the history
    if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
      messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
    }
  }
}

{{< /tab >}}
{{< tab header="OpenAI Go" lang="go" >}}

{{< include "quickstart/go/openAI/quickstart.go" >}}
package main

import (
  "context"
  "encoding/json"
  "fmt"
  "log"

  "github.com/googleapis/mcp-toolbox-sdk-go/core"
  openai "github.com/openai/openai-go"
)

// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
  // Get the input schema
  jsonSchemaBytes, err := toolboxTool.InputSchema()
  if err != nil {
    return openai.ChatCompletionToolParam{}
  }

  // Unmarshal the JSON bytes into FunctionParameters
  var paramsSchema openai.FunctionParameters
  if err := json.Unmarshal(jsonSchemaBytes, &paramsSchema); err != nil {
    return openai.ChatCompletionToolParam{}
  }

  // Create and return the final tool parameter struct.
  return openai.ChatCompletionToolParam{
    Function: openai.FunctionDefinitionParam{
      Name:        toolboxTool.Name(),
      Description: openai.String(toolboxTool.Description()),
      Parameters:  paramsSchema,
    },
  }
}

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
  "Find hotels in Basel with Basel in its name.",
  "Can you book the hotel Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
  // Setup
  ctx := context.Background()
  toolboxURL := "http://localhost:5000"
  openAIClient := openai.NewClient()

  // Initialize the MCP Toolbox client.
  toolboxClient, err := core.NewToolboxClient(toolboxURL)
  if err != nil {
    log.Fatalf("Failed to create Toolbox client: %v", err)
  }

  // Load the tools using the MCP Toolbox SDK.
  tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
  if err != nil {
    log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
  }

  openAITools := make([]openai.ChatCompletionToolParam, len(tools))
  toolsMap := make(map[string]*core.ToolboxTool, len(tools))

  for i, tool := range tools {
    // Convert the Toolbox tool into the openAI FunctionDeclaration format.
    openAITools[i] = ConvertToOpenAITool(tool)
    // Add tool to a map for lookup later
    toolsMap[tool.Name()] = tool

  }

  params := openai.ChatCompletionNewParams{
    Messages: []openai.ChatCompletionMessageParamUnion{
      openai.SystemMessage(systemPrompt),
    },
    Tools: openAITools,
    Seed:  openai.Int(0),
    Model: openai.ChatModelGPT4o,
  }

  for _, query := range queries {

    params.Messages = append(params.Messages, openai.UserMessage(query))

    // Make initial chat completion request
    completion, err := openAIClient.Chat.Completions.New(ctx, params)
    if err != nil {
      panic(err)
    }

    toolCalls := completion.Choices[0].Message.ToolCalls

    // Return early if there are no tool calls
    if len(toolCalls) == 0 {
      log.Println("No function call")
    }

    // If there was a function call, continue the conversation
    params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
    for _, toolCall := range toolCalls {

      toolName := toolCall.Function.Name
      toolToInvoke := toolsMap[toolName]

      var args map[string]any
      err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
      if err != nil {
        panic(err)
      }

      result, err := toolToInvoke.Invoke(ctx, args)
      if err != nil {
        log.Fatal("Could not invoke tool", err)
      }

      params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
    }

    completion, err = openAIClient.Chat.Completions.New(ctx, params)
    if err != nil {
      panic(err)
    }

    params.Messages = append(params.Messages, openai.AssistantMessage(query))

    fmt.Println("\n", completion.Choices[0].Message.Content)

  }

}

{{< /tab >}}
{{< /tabpane >}}
@@ -64,26 +64,380 @@ npm install @google/genai
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="js" >}}

{{< include "quickstart/js/langchain/quickstart.js" >}}
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { ToolboxClient } from "@toolbox-sdk/core";
import { tool } from "@langchain/core/tools";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { MemorySaver } from "@langchain/langgraph";

// Replace it with your API key
process.env.GOOGLE_API_KEY = 'your-api-key';

const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;

const queries = [
  "Find hotels in Basel with Basel in its name.",
  "Can you book the Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

async function main() {
  const model = new ChatGoogleGenerativeAI({
    model: "gemini-2.0-flash",
  });

  const client = new ToolboxClient("http://127.0.0.1:5000");
  const toolboxTools = await client.loadToolset("my-toolset");

  // Define the basics of the tool: name, description, schema and core logic
  const getTool = (toolboxTool) => tool(toolboxTool, {
    name: toolboxTool.getName(),
    description: toolboxTool.getDescription(),
    schema: toolboxTool.getParamSchema()
  });
  const tools = toolboxTools.map(getTool);

  const agent = createReactAgent({
    llm: model,
    tools: tools,
    checkpointer: new MemorySaver(),
    systemPrompt: prompt,
  });

  const langGraphConfig = {
    configurable: {
      thread_id: "test-thread",
    },
  };

  for (const query of queries) {
    const agentOutput = await agent.invoke(
      {
        messages: [
          {
            role: "user",
            content: query,
          },
        ],
        verbose: true,
      },
      langGraphConfig
    );
    const response = agentOutput.messages[agentOutput.messages.length - 1].content;
    console.log(response);
  }
}

main();

{{< /tab >}}
{{< tab header="GenkitJS" lang="js" >}}

{{< include "quickstart/js/genkit/quickstart.js" >}}
import { ToolboxClient } from "@toolbox-sdk/core";
import { genkit } from "genkit";
import { googleAI } from '@genkit-ai/googleai';

// Replace it with your API key
process.env.GOOGLE_API_KEY = 'your-api-key';

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;

const queries = [
  "Find hotels in Basel with Basel in its name.",
  "Can you book the Hilton Basel for me?",
  "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
  "My check in dates would be from April 10, 2024 to April 19, 2024.",
];

async function main() {
  const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");

  const ai = genkit({
    plugins: [
      googleAI({
        apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
      })
    ],
    model: googleAI.model('gemini-2.0-flash'),
  });

  const toolboxTools = await toolboxClient.loadToolset("my-toolset");
  const toolMap = Object.fromEntries(
    toolboxTools.map((tool) => {
      const definedTool = ai.defineTool(
        {
          name: tool.getName(),
          description: tool.getDescription(),
          inputSchema: tool.getParamSchema(),
        },
        tool
      );
      return [tool.getName(), definedTool];
    })
  );
  const tools = Object.values(toolMap);

  let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];

  for (const query of queries) {
    conversationHistory.push({ role: "user", content: [{ text: query }] });
    // `let` (not `const`) so the follow-up generate call below can reassign it.
    let response = await ai.generate({
      messages: conversationHistory,
      tools: tools,
    });
    conversationHistory.push(response.message);

    const toolRequests = response.toolRequests;
    if (toolRequests?.length > 0) {
      // Execute tools concurrently and collect their responses.
      const toolResponses = await Promise.all(
        toolRequests.map(async (call) => {
          try {
            const toolOutput = await toolMap[call.name].invoke(call.input);
            return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
          } catch (e) {
            console.error(`Error executing tool ${call.name}:`, e);
            return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
          }
        })
      );

      conversationHistory.push(...toolResponses);

      // Call the AI again with the tool results.
      response = await ai.generate({ messages: conversationHistory, tools });
      conversationHistory.push(response.message);
    }

    console.log(response.text);
  }
}

main();
{{< /tab >}}
{{< tab header="LlamaIndex" lang="js" >}}
|
||||
|
||||
{{< include "quickstart/js/llamaindex/quickstart.js" >}}
|
||||
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
|
||||
import { agent } from "@llamaindex/workflow";
|
||||
import { createMemory, staticBlock, tool } from "llamaindex";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
|
||||
When the user searches for a hotel, mention its name, id, location and price tier.
|
||||
Always mention hotel ids while performing any searches — this is very important for operations.
|
||||
For any bookings or cancellations, please provide the appropriate confirmation.
|
||||
Update check-in or check-out dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
// Connect to MCP Toolbox
|
||||
const client = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
const tools = toolboxTools.map((toolboxTool) => {
|
||||
return tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
execute: toolboxTool,
|
||||
});
|
||||
});
|
||||
|
||||
// Initialize LLM
|
||||
const llm = gemini({
|
||||
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
|
||||
apiKey: process.env.GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
const memory = createMemory({
|
||||
memoryBlocks: [
|
||||
staticBlock({
|
||||
content: prompt,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
// Create the Agent
|
||||
const myAgent = agent({
|
||||
tools: tools,
|
||||
llm,
|
||||
memory,
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
const result = await myAgent.run(query);
|
||||
const output = result.data.result;
|
||||
|
||||
console.log(`\nUser: ${query}`);
|
||||
if (typeof output === "string") {
|
||||
console.log(output.trim());
|
||||
} else if (typeof output === "object" && "text" in output) {
|
||||
console.log(output.text.trim());
|
||||
} else {
|
||||
console.log(JSON.stringify(output));
|
||||
}
|
||||
}
|
||||
//You may observe some extra logs during execution due to the run method provided by Llama.
|
||||
console.log("Agent run finished.");
|
||||
}
|
||||
|
||||
main();
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GoogleGenAI" lang="js" >}}
|
||||
import { GoogleGenAI } from "@google/genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
{{< include "quickstart/js/genAI/quickstart.js" >}}
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = 'enter your api here'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
function mapZodTypeToOpenAPIType(zodTypeName) {
|
||||
|
||||
console.log(zodTypeName)
|
||||
const typeMap = {
|
||||
'ZodString': 'string',
|
||||
'ZodNumber': 'number',
|
||||
'ZodBoolean': 'boolean',
|
||||
'ZodArray': 'array',
|
||||
'ZodObject': 'object',
|
||||
};
|
||||
return typeMap[zodTypeName] || 'string';
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
const toolboxClient = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
|
||||
const geminiTools = [{
|
||||
functionDeclarations: toolboxTools.map(tool => {
|
||||
|
||||
const schema = tool.getParamSchema();
|
||||
const properties = {};
|
||||
const required = [];
|
||||
|
||||
|
||||
for (const [key, param] of Object.entries(schema.shape)) {
|
||||
properties[key] = {
|
||||
type: mapZodTypeToOpenAPIType(param.constructor.name),
|
||||
description: param.description || '',
|
||||
};
|
||||
required.push(key)
|
||||
}
|
||||
|
||||
return {
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
parameters: { type: 'object', properties, required },
|
||||
};
|
||||
})
|
||||
}];
|
||||
|
||||
|
||||
const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });
|
||||
|
||||
const chat = genAI.chats.create({
|
||||
model: "gemini-2.5-flash",
|
||||
config: {
|
||||
systemInstruction: prompt,
|
||||
tools: geminiTools,
|
||||
}
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
|
||||
let currentResult = await chat.sendMessage({ message: query });
|
||||
|
||||
let finalResponseGiven = false
|
||||
while (!finalResponseGiven) {
|
||||
|
||||
const response = currentResult;
|
||||
const functionCalls = response.functionCalls || [];
|
||||
|
||||
if (functionCalls.length === 0) {
|
||||
console.log(response.text)
|
||||
finalResponseGiven = true;
|
||||
} else {
|
||||
const toolResponses = [];
|
||||
for (const call of functionCalls) {
|
||||
const toolName = call.name
|
||||
const toolToExecute = toolboxTools.find(t => t.getName() === toolName);
|
||||
|
||||
if (toolToExecute) {
|
||||
try {
|
||||
const functionResult = await toolToExecute(call.args);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { result: functionResult } }
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool '${toolName}':`, e);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { error: e.message } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentResult = await chat.sendMessage({ message: toolResponses });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabpane >}}
|
||||
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
module genai-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
google.golang.org/genai v1.21.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,174 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
// ConvertToGenaiTool translates a ToolboxTool into the genai.FunctionDeclaration format.
|
||||
func ConvertToGenaiTool(toolboxTool *core.ToolboxTool) *genai.Tool {
|
||||
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return &genai.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
// First, create the function declaration.
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
}
|
||||
|
||||
// Then, wrap the function declaration in a genai.Tool struct.
|
||||
return &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
|
||||
func printResponse(resp *genai.GenerateContentResponse) {
|
||||
for _, cand := range resp.Candidates {
|
||||
if cand.Content != nil {
|
||||
for _, part := range cand.Content.Parts {
|
||||
fmt.Println(part.Text)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
apiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
|
||||
// Initialize the Google GenAI client using the explicit ClientConfig.
|
||||
client, err := genai.NewClient(ctx, &genai.ClientConfig{
|
||||
APIKey: apiKey,
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google GenAI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
genAITools := make([]*genai.Tool, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
genAITools[i] = ConvertToGenaiTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Set up the generative model with the available tool.
|
||||
modelName := "gemini-2.0-flash"
|
||||
|
||||
// Create the initial content prompt for the model.
|
||||
messageHistory := []*genai.Content{
|
||||
genai.NewContentFromText(systemPrompt, genai.RoleUser),
|
||||
}
|
||||
config := &genai.GenerateContentConfig{
|
||||
Tools: genAITools,
|
||||
ToolConfig: &genai.ToolConfig{
|
||||
FunctionCallingConfig: &genai.FunctionCallingConfig{
|
||||
Mode: genai.FunctionCallingConfigModeAny,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
messageHistory = append(messageHistory, genai.NewContentFromText(query, genai.RoleUser))
|
||||
|
||||
genContentResp, err := client.Models.GenerateContent(ctx, modelName, messageHistory, config)
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed for query '%s': %v", query, err)
|
||||
}
|
||||
|
||||
if len(genContentResp.Candidates) > 0 && genContentResp.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, genContentResp.Candidates[0].Content)
|
||||
}
|
||||
|
||||
functionCalls := genContentResp.FunctionCalls()
|
||||
|
||||
toolResponseParts := []*genai.Part{}
|
||||
|
||||
for _, fc := range functionCalls {
|
||||
|
||||
toolToInvoke, found := toolsMap[fc.Name]
|
||||
if !found {
|
||||
log.Fatalf("Tool '%s' not found in loaded tools map. Check toolset configuration.", fc.Name)
|
||||
}
|
||||
|
||||
toolResult, invokeErr := toolToInvoke.Invoke(ctx, fc.Args)
|
||||
if invokeErr != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", fc.Name, invokeErr)
|
||||
}
|
||||
|
||||
// Enhanced Tool Result Handling (retained to prevent nil issues)
|
||||
toolResultString := ""
|
||||
if toolResult != nil {
|
||||
jsonBytes, marshalErr := json.Marshal(toolResult)
|
||||
if marshalErr == nil {
|
||||
toolResultString = string(jsonBytes)
|
||||
} else {
|
||||
toolResultString = fmt.Sprintf("%v", toolResult)
|
||||
}
|
||||
}
|
||||
|
||||
responseMap := map[string]any{"result": toolResultString}
|
||||
|
||||
toolResponseParts = append(toolResponseParts, genai.NewPartFromFunctionResponse(fc.Name, responseMap))
|
||||
}
|
||||
// Add all accumulated tool responses for this turn to the message history.
|
||||
toolResponseContent := genai.NewContentFromParts(toolResponseParts, "function")
|
||||
messageHistory = append(messageHistory, toolResponseContent)
|
||||
|
||||
finalResponse, err := client.Models.GenerateContent(ctx, modelName, messageHistory, &genai.GenerateContentConfig{})
|
||||
if err != nil {
|
||||
log.Fatalf("Error calling GenerateContent (with function result): %v", err)
|
||||
}
|
||||
|
||||
printResponse(finalResponse)
|
||||
// Add the final textual response from the LLM to the history
|
||||
if len(finalResponse.Candidates) > 0 && finalResponse.Candidates[0].Content != nil {
|
||||
messageHistory = append(messageHistory, finalResponse.Candidates[0].Content)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
module genkit-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/firebase/genkit/go v0.6.2
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
||||
github.com/buger/jsonparser v1.1.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/goccy/go-yaml v1.17.1 // indirect
|
||||
github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/invopop/jsonschema v0.13.0 // indirect
|
||||
github.com/mailru/easyjson v0.9.0 // indirect
|
||||
github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a // indirect
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genai v1.11.1 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
@@ -1,129 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/firebase/genkit/go/plugins/googlegenai"
|
||||
)
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create Toolbox Client
|
||||
toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
// Initialize Genkit
|
||||
g, err := genkit.Init(ctx,
|
||||
genkit.WithPlugins(&googlegenai.GoogleAI{}),
|
||||
genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to init genkit: %v\n", err)
|
||||
}
|
||||
|
||||
// Create a conversation history
|
||||
conversationHistory := []*ai.Message{
|
||||
ai.NewSystemTextMessage(systemPrompt),
|
||||
}
|
||||
|
||||
// Convert your tool to a Genkit tool.
|
||||
genkitTools := make([]ai.Tool, len(tools))
|
||||
for i, tool := range tools {
|
||||
newTool, err := tbgenkit.ToGenkitTool(tool, g)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to convert tool: %v\n", err)
|
||||
}
|
||||
genkitTools[i] = newTool
|
||||
}
|
||||
|
||||
toolRefs := make([]ai.ToolRef, len(genkitTools))
|
||||
|
||||
for i, tool := range genkitTools {
|
||||
toolRefs[i] = tool
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
|
||||
response, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(conversationHistory...),
|
||||
ai.WithTools(toolRefs...),
|
||||
ai.WithReturnToolRequests(true),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("%v\n", err)
|
||||
}
|
||||
conversationHistory = append(conversationHistory, response.Message)
|
||||
|
||||
parts := []*ai.Part{}
|
||||
|
||||
for _, req := range response.ToolRequests() {
|
||||
tool := genkit.LookupTool(g, req.Name)
|
||||
if tool == nil {
|
||||
log.Fatalf("tool %q not found", req.Name)
|
||||
}
|
||||
|
||||
output, err := tool.RunRaw(ctx, req.Input)
|
||||
if err != nil {
|
||||
log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
|
||||
}
|
||||
|
||||
parts = append(parts,
|
||||
ai.NewToolResponsePart(&ai.ToolResponse{
|
||||
Name: req.Name,
|
||||
Ref: req.Ref,
|
||||
Output: output,
|
||||
}))
|
||||
|
||||
}
|
||||
|
||||
if len(parts) > 0 {
|
||||
resp, err := genkit.Generate(ctx, g,
|
||||
ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
|
||||
ai.WithTools(toolRefs...),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println("\n", resp.Text())
|
||||
conversationHistory = append(conversationHistory, resp.Message)
|
||||
} else {
|
||||
fmt.Println("\n", response.Text())
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
module langchan-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
github.com/tmc/langchaingo v0.1.13
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.1 // indirect
|
||||
cloud.google.com/go/ai v0.7.0 // indirect
|
||||
cloud.google.com/go/aiplatform v1.85.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
cloud.google.com/go/iam v1.5.2 // indirect
|
||||
cloud.google.com/go/longrunning v0.6.7 // indirect
|
||||
cloud.google.com/go/vertexai v0.12.0 // indirect
|
||||
github.com/dlclark/regexp2 v1.10.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/generative-ai-go v0.15.1 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/pkoukk/tiktoken-go v0.1.6 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,149 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
"github.com/tmc/langchaingo/llms/googleai"
|
||||
)
|
||||
|
||||
// ConvertToLangchainTool converts a generic core.ToolboxTool into a LangChainGo llms.Tool.
|
||||
func ConvertToLangchainTool(toolboxTool *core.ToolboxTool) llms.Tool {
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return llms.Tool{}
|
||||
}
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Convert into LangChain's llms.Tool
|
||||
return llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: toolboxTool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it.",
|
||||
"Please book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
genaiKey := os.Getenv("GOOGLE_API_KEY")
|
||||
toolboxURL := "http://localhost:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
// Initialize the Google AI client (LLM).
|
||||
llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Google AI client: %v", err)
|
||||
}
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tool using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
langchainTools := make([]llms.Tool, len(tools))
|
||||
// Convert the loaded ToolboxTools into the format LangChainGo requires.
|
||||
for i, tool := range tools {
|
||||
langchainTools[i] = ConvertToLangchainTool(tool)
|
||||
toolsMap[tool.Name()] = tool
|
||||
}
|
||||
|
||||
// Start the conversation history.
|
||||
messageHistory := []llms.MessageContent{
|
||||
llms.TextParts(llms.ChatMessageTypeSystem, systemPrompt),
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeHuman, query))
|
||||
|
||||
// Make the first call to the LLM, making it aware of the tool.
|
||||
resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(langchainTools))
|
||||
if err != nil {
|
||||
log.Fatalf("LLM call failed: %v", err)
|
||||
}
|
||||
respChoice := resp.Choices[0]
|
||||
|
||||
assistantResponse := llms.TextParts(llms.ChatMessageTypeAI, respChoice.Content)
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
assistantResponse.Parts = append(assistantResponse.Parts, tc)
|
||||
}
|
||||
messageHistory = append(messageHistory, assistantResponse)
|
||||
|
||||
// Process each tool call requested by the model.
|
||||
for _, tc := range respChoice.ToolCalls {
|
||||
toolName := tc.FunctionCall.Name
|
||||
tool := toolsMap[toolName]
|
||||
var args map[string]any
|
||||
if err := json.Unmarshal([]byte(tc.FunctionCall.Arguments), &args); err != nil {
|
||||
log.Fatalf("Failed to unmarshal arguments for tool '%s': %v", toolName, err)
|
||||
}
|
||||
toolResult, err := tool.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to execute tool '%s': %v", toolName, err)
|
||||
}
|
||||
if toolResult == "" || toolResult == nil {
|
||||
toolResult = "Operation completed successfully with no specific return value."
|
||||
}
|
||||
|
||||
// Create the tool call response message and add it to the history.
|
||||
toolResponse := llms.MessageContent{
|
||||
Role: llms.ChatMessageTypeTool,
|
||||
Parts: []llms.ContentPart{
|
||||
llms.ToolCallResponse{
|
||||
Name: toolName,
|
||||
Content: fmt.Sprintf("%v", toolResult),
|
||||
},
|
||||
},
|
||||
}
|
||||
messageHistory = append(messageHistory, toolResponse)
|
||||
}
|
||||
finalResp, err := llm.GenerateContent(ctx, messageHistory)
|
||||
if err != nil {
|
||||
log.Fatalf("Final LLM call failed after tool execution: %v", err)
|
||||
}
|
||||
|
||||
// Add the final textual response from the LLM to the history
|
||||
messageHistory = append(messageHistory, llms.TextParts(llms.ChatMessageTypeAI, finalResp.Choices[0].Content))
|
||||
|
||||
fmt.Println(finalResp.Choices[0].Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
module openai-quickstart
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
|
||||
github.com/openai/openai-go v1.12.0
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/tidwall/gjson v1.14.4 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/oauth2 v0.30.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
google.golang.org/api v0.242.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
)
|
||||
@@ -1,141 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
// ConvertToOpenAITool converts a ToolboxTool into the go-openai library's Tool format.
|
||||
func ConvertToOpenAITool(toolboxTool *core.ToolboxTool) openai.ChatCompletionToolParam {
|
||||
// Get the input schema
|
||||
jsonSchemaBytes, err := toolboxTool.InputSchema()
|
||||
if err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Unmarshal the JSON bytes into FunctionParameters
|
||||
var paramsSchema openai.FunctionParameters
|
||||
if err := json.Unmarshal(jsonSchemaBytes, ¶msSchema); err != nil {
|
||||
return openai.ChatCompletionToolParam{}
|
||||
}
|
||||
|
||||
// Create and return the final tool parameter struct.
|
||||
return openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: toolboxTool.Name(),
|
||||
Description: openai.String(toolboxTool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`
|
||||
|
||||
var queries = []string{
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the hotel Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Setup
|
||||
ctx := context.Background()
|
||||
toolboxURL := "http://localhost:5000"
|
||||
openAIClient := openai.NewClient()
|
||||
|
||||
// Initialize the MCP Toolbox client.
|
||||
toolboxClient, err := core.NewToolboxClient(toolboxURL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Load the tools using the MCP Toolbox SDK.
|
||||
tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tool : %v\nMake sure your Toolbox server is running and the tool is configured.", err)
|
||||
}
|
||||
|
||||
openAITools := make([]openai.ChatCompletionToolParam, len(tools))
|
||||
toolsMap := make(map[string]*core.ToolboxTool, len(tools))
|
||||
|
||||
for i, tool := range tools {
|
||||
// Convert the Toolbox tool into the openAI FunctionDeclaration format.
|
||||
openAITools[i] = ConvertToOpenAITool(tool)
|
||||
// Add tool to a map for lookup later
|
||||
toolsMap[tool.Name()] = tool
|
||||
|
||||
}
|
||||
|
||||
params := openai.ChatCompletionNewParams{
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{
|
||||
openai.SystemMessage(systemPrompt),
|
||||
},
|
||||
Tools: openAITools,
|
||||
Seed: openai.Int(0),
|
||||
Model: openai.ChatModelGPT4o,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
|
||||
params.Messages = append(params.Messages, openai.UserMessage(query))
|
||||
|
||||
// Make initial chat completion request
|
||||
completion, err := openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
toolCalls := completion.Choices[0].Message.ToolCalls
|
||||
|
||||
// Return early if there are no tool calls
|
||||
if len(toolCalls) == 0 {
|
||||
log.Println("No function call")
|
||||
}
|
||||
|
||||
// If there was a function call, continue the conversation
|
||||
params.Messages = append(params.Messages, completion.Choices[0].Message.ToParam())
|
||||
for _, toolCall := range toolCalls {
|
||||
|
||||
toolName := toolCall.Function.Name
|
||||
toolToInvoke := toolsMap[toolName]
|
||||
|
||||
var args map[string]any
|
||||
err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
result, err := toolToInvoke.Invoke(ctx, args)
|
||||
if err != nil {
|
||||
log.Fatal("Could not invoke tool", err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.ToolMessage(result.(string), toolCall.ID))
|
||||
}
|
||||
|
||||
completion, err = openAIClient.Chat.Completions.New(ctx, params)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
params.Messages = append(params.Messages, openai.AssistantMessage(query))
|
||||
|
||||
fmt.Println("\n", completion.Choices[0].Message.Content)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
import { GoogleGenAI } from "@google/genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, you MUST use the available tools to find information. Mention its name, id,
|
||||
location and price tier. Always mention hotel id while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
function mapZodTypeToOpenAPIType(zodTypeName) {
|
||||
|
||||
console.log(zodTypeName)
|
||||
const typeMap = {
|
||||
'ZodString': 'string',
|
||||
'ZodNumber': 'number',
|
||||
'ZodBoolean': 'boolean',
|
||||
'ZodArray': 'array',
|
||||
'ZodObject': 'object',
|
||||
};
|
||||
return typeMap[zodTypeName] || 'string';
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
const toolboxClient = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
|
||||
const geminiTools = [{
|
||||
functionDeclarations: toolboxTools.map(tool => {
|
||||
|
||||
const schema = tool.getParamSchema();
|
||||
const properties = {};
|
||||
const required = [];
|
||||
|
||||
|
||||
for (const [key, param] of Object.entries(schema.shape)) {
|
||||
properties[key] = {
|
||||
type: mapZodTypeToOpenAPIType(param.constructor.name),
|
||||
description: param.description || '',
|
||||
};
|
||||
required.push(key)
|
||||
}
|
||||
|
||||
return {
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
parameters: { type: 'object', properties, required },
|
||||
};
|
||||
})
|
||||
}];
|
||||
|
||||
|
||||
const genAI = new GoogleGenAI({ apiKey: GOOGLE_API_KEY });
|
||||
|
||||
const chat = genAI.chats.create({
|
||||
model: "gemini-2.5-flash",
|
||||
config: {
|
||||
systemInstruction: prompt,
|
||||
tools: geminiTools,
|
||||
}
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
|
||||
let currentResult = await chat.sendMessage({ message: query });
|
||||
|
||||
let finalResponseGiven = false
|
||||
while (!finalResponseGiven) {
|
||||
|
||||
const response = currentResult;
|
||||
const functionCalls = response.functionCalls || [];
|
||||
|
||||
if (functionCalls.length === 0) {
|
||||
console.log(response.text)
|
||||
finalResponseGiven = true;
|
||||
} else {
|
||||
const toolResponses = [];
|
||||
for (const call of functionCalls) {
|
||||
const toolName = call.name
|
||||
const toolToExecute = toolboxTools.find(t => t.getName() === toolName);
|
||||
|
||||
if (toolToExecute) {
|
||||
try {
|
||||
const functionResult = await toolToExecute(call.args);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { result: functionResult } }
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool '${toolName}':`, e);
|
||||
toolResponses.push({
|
||||
functionResponse: { name: call.name, response: { error: e.message } }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentResult = await chat.sendMessage({ message: toolResponses });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,88 +0,0 @@
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { genkit } from "genkit";
|
||||
import { googleAI } from '@genkit-ai/googleai';
|
||||
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
const toolMap = Object.fromEntries(
|
||||
toolboxTools.map((tool) => {
|
||||
const definedTool = ai.defineTool(
|
||||
{
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
inputSchema: tool.getParamSchema(),
|
||||
},
|
||||
tool
|
||||
);
|
||||
return [tool.getName(), definedTool];
|
||||
})
|
||||
);
|
||||
const tools = Object.values(toolMap);
|
||||
|
||||
let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];
|
||||
|
||||
for (const query of queries) {
|
||||
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
||||
const response = await ai.generate({
|
||||
messages: conversationHistory,
|
||||
tools: tools,
|
||||
});
|
||||
conversationHistory.push(response.message);
|
||||
|
||||
const toolRequests = response.toolRequests;
|
||||
if (toolRequests?.length > 0) {
|
||||
// Execute tools concurrently and collect their responses.
|
||||
const toolResponses = await Promise.all(
|
||||
toolRequests.map(async (call) => {
|
||||
try {
|
||||
const toolOutput = await toolMap[call.name].invoke(call.input);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool ${call.name}:`, e);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
conversationHistory.push(...toolResponses);
|
||||
|
||||
// Call the AI again with the tool results.
|
||||
response = await ai.generate({ messages: conversationHistory, tools });
|
||||
conversationHistory.push(response.message);
|
||||
}
|
||||
|
||||
console.log(response.text);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,73 +0,0 @@
|
||||
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { tool } from "@langchain/core/tools";
|
||||
import { createReactAgent } from "@langchain/langgraph/prebuilt";
|
||||
import { MemorySaver } from "@langchain/langgraph";
|
||||
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
const model = new ChatGoogleGenerativeAI({
|
||||
model: "gemini-2.0-flash",
|
||||
});
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(toolboxTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
const tools = toolboxTools.map(getTool);
|
||||
|
||||
const agent = createReactAgent({
|
||||
llm: model,
|
||||
tools: tools,
|
||||
checkpointer: new MemorySaver(),
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
const langGraphConfig = {
|
||||
configurable: {
|
||||
thread_id: "test-thread",
|
||||
},
|
||||
};
|
||||
|
||||
for (const query of queries) {
|
||||
const agentOutput = await agent.invoke(
|
||||
{
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: query,
|
||||
},
|
||||
],
|
||||
verbose: true,
|
||||
},
|
||||
langGraphConfig
|
||||
);
|
||||
const response = agentOutput.messages[agentOutput.messages.length - 1].content;
|
||||
console.log(response);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,79 +0,0 @@
|
||||
import { gemini, GEMINI_MODEL } from "@llamaindex/google";
|
||||
import { agent } from "@llamaindex/workflow";
|
||||
import { createMemory, staticBlock, tool } from "llamaindex";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
|
||||
const TOOLBOX_URL = "http://127.0.0.1:5000"; // Update if needed
|
||||
const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
|
||||
|
||||
const prompt = `
|
||||
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
|
||||
When the user searches for a hotel, mention its name, id, location and price tier.
|
||||
Always mention hotel ids while performing any searches — this is very important for operations.
|
||||
For any bookings or cancellations, please provide the appropriate confirmation.
|
||||
Update check-in or check-out dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function main() {
|
||||
// Connect to MCP Toolbox
|
||||
const client = new ToolboxClient(TOOLBOX_URL);
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
const tools = toolboxTools.map((toolboxTool) => {
|
||||
return tool({
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
parameters: toolboxTool.getParamSchema(),
|
||||
execute: toolboxTool,
|
||||
});
|
||||
});
|
||||
|
||||
// Initialize LLM
|
||||
const llm = gemini({
|
||||
model: GEMINI_MODEL.GEMINI_2_0_FLASH,
|
||||
apiKey: GOOGLE_API_KEY,
|
||||
});
|
||||
|
||||
const memory = createMemory({
|
||||
memoryBlocks: [
|
||||
staticBlock({
|
||||
content: prompt,
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
// Create the Agent
|
||||
const myAgent = agent({
|
||||
tools: tools,
|
||||
llm,
|
||||
memory,
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
const result = await myAgent.run(query);
|
||||
const output = result.data.result;
|
||||
|
||||
console.log(`\nUser: ${query}`);
|
||||
if (typeof output === "string") {
|
||||
console.log(output.trim());
|
||||
} else if (typeof output === "object" && "text" in output) {
|
||||
console.log(output.text.trim());
|
||||
} else {
|
||||
console.log(JSON.stringify(output));
|
||||
}
|
||||
}
|
||||
//You may observe some extra logs during execution due to the run method provided by Llama.
|
||||
console.log("Agent run finished.");
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -5,10 +5,9 @@ weight: 2
|
||||
description: >
|
||||
Connect your IDE to Firestore using Toolbox.
|
||||
---
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="canonical" href="https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/firestore/native/docs/connect-ide-using-mcp-toolbox"/>
|
||||
</head>
|
||||
</html>
|
||||
</html>
|
||||
@@ -48,19 +48,19 @@ to expose your developer assistant tools to a Looker instance:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -235,7 +235,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"looker-toolbox": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--stdio", "--prebuilt", "looker"],
|
||||
|
||||
@@ -37,19 +37,19 @@ description: "Connect your IDE to SQL Server using Toolbox."
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -182,17 +182,19 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mssql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"cloud-sql-sqlserver": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","cloud-sql-mssql","--stdio"],
|
||||
"env": {
|
||||
"MSSQL_HOST": "",
|
||||
"MSSQL_PORT": "",
|
||||
"MSSQL_DATABASE": "",
|
||||
"MSSQL_USER": "",
|
||||
"MSSQL_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -284,4 +286,4 @@ The following tools are available to the LLM:
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
{{< /notice >}}
|
||||
@@ -37,19 +37,19 @@ description: "Connect your IDE to MySQL using Toolbox."
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -182,7 +182,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"mysql": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","mysql","--stdio"],
|
||||
|
||||
@@ -1,281 +0,0 @@
|
||||
---
|
||||
title: Neo4j using MCP
|
||||
type: docs
|
||||
weight: 2
|
||||
description: "Connect your IDE to Neo4j using Toolbox."
|
||||
---
|
||||
|
||||
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a Neo4j instance:
|
||||
|
||||
* [Cursor][cursor]
|
||||
* [Windsurf][windsurf] (Codium)
|
||||
* [Visual Studio Code][vscode] (Copilot)
|
||||
* [Cline][cline] (VS Code extension)
|
||||
* [Claude desktop][claudedesktop]
|
||||
* [Claude code][claudecode]
|
||||
* [Gemini CLI][geminicli]
|
||||
* [Gemini Code Assist][geminicodeassist]
|
||||
|
||||
[toolbox]: https://github.com/googleapis/genai-toolbox
|
||||
[cursor]: #configure-your-mcp-client
|
||||
[windsurf]: #configure-your-mcp-client
|
||||
[vscode]: #configure-your-mcp-client
|
||||
[cline]: #configure-your-mcp-client
|
||||
[claudedesktop]: #configure-your-mcp-client
|
||||
[claudecode]: #configure-your-mcp-client
|
||||
[geminicli]: #configure-your-mcp-client
|
||||
[geminicodeassist]: #configure-your-mcp-client
|
||||
|
||||
## Set up the database
|
||||
|
||||
1. [Create or select a Neo4j instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
|
||||
|
||||
## Install MCP Toolbox
|
||||
|
||||
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version v0.15.0+:
|
||||
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Verify the installation:
|
||||
|
||||
```bash
|
||||
./toolbox --version
|
||||
```
|
||||
|
||||
## Configure your MCP Client
|
||||
|
||||
{{< tabpane text=true >}}
|
||||
{{% tab header="Claude code" lang="en" %}}
|
||||
|
||||
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
|
||||
1. Create a `.mcp.json` file in your project root if it doesn't exist.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude code to apply the new configuration.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Claude desktop" lang="en" %}}
|
||||
|
||||
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
|
||||
1. Under the Developer tab, tap Edit Config to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Restart Claude desktop.
|
||||
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cline" lang="en" %}}
|
||||
|
||||
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
|
||||
1. Tap Configure MCP Servers to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Cursor" lang="en" %}}
|
||||
|
||||
1. Create a `.cursor` directory in your project root if it doesn't exist.
|
||||
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
|
||||
{{% /tab %}}
|
||||
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
|
||||
|
||||
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
|
||||
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp" : {
|
||||
"servers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Windsurf" lang="en" %}}
|
||||
|
||||
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
|
||||
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini CLI" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{% tab header="Gemini Code Assist" lang="en" %}}
|
||||
|
||||
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
|
||||
1. Enable Agent Mode in Gemini Code Assist chat.
|
||||
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
|
||||
1. Add the following configuration, replace the environment variables with your values, and then save:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"neo4j": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","neo4j","--stdio"],
|
||||
"env": {
|
||||
"NEO4J_URI": "",
|
||||
"NEO4J_DATABASE": "",
|
||||
"NEO4J_USERNAME": "",
|
||||
"NEO4J_PASSWORD": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
## Use Tools
|
||||
|
||||
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant to get the graph schema or execute Cypher statements.
|
||||
|
||||
The following tools are available to the LLM:
|
||||
|
||||
1. **get_schema**: extracts the complete database schema, including details about node labels, relationships, properties, constraints, and indexes.
|
||||
1. **execute_cypher**: executes any arbitrary Cypher statement.
|
||||
|
||||
{{< notice note >}}
|
||||
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
|
||||
{{< /notice >}}
|
||||
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/linux/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/arm64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/darwin/amd64/toolbox
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/windows/amd64/toolbox.exe
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
@@ -217,7 +217,7 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"mcpServers": {
|
||||
"postgres": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","postgres","--stdio"],
|
||||
|
||||
@@ -19,9 +19,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
  * `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
  * `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
  * `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
  * `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
  * `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
  * `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
  * `ALLOYDB_POSTGRES_USER`: The database username.
  * `ALLOYDB_POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
  * **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -50,7 +49,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `--prebuilt` value: `bigquery`
* **Environment Variables:**
  * `BIGQUERY_PROJECT`: The GCP project ID.
  * `BIGQUERY_LOCATION`: (Optional) The dataset location.
* **Permissions:**
  * **BigQuery User** (`roles/bigquery.user`) to execute queries and view metadata.
  * **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view all datasets.
@@ -90,9 +88,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
  * `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
  * `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
  * `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
  * `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to IAM authentication if unspecified.
  * `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
  * `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
  * `CLOUD_SQL_POSTGRES_USER`: The database username.
  * `CLOUD_SQL_POSTGRES_PASSWORD`: The password for the database user.
* **Permissions:**
  * **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -111,7 +108,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
  * `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
  * `CLOUD_SQL_MSSQL_USER`: The database username.
  * `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
  * `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
  * **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the instance.
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
@@ -137,7 +133,7 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `--prebuilt` value: `firestore`
* **Environment Variables:**
  * `FIRESTORE_PROJECT`: The GCP project ID.
  * `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to "(default)".
  * `FIRESTORE_DATABASE`: The Firestore database ID.
* **Permissions:**
  * **Cloud Datastore User** (`roles/datastore.user`) to get documents, list collections, and query collections.
  * **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and validate Firestore rules.
@@ -230,7 +226,6 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
  * `POSTGRES_DATABASE`: The name of the database to connect to.
  * `POSTGRES_USER`: The database username.
  * `POSTGRES_PASSWORD`: The password for the database user.
  * `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db connection string.
* **Permissions:**
  * Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* **Tools:**
@@ -266,17 +261,3 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
  * `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
  * `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
  * `list_tables`: Lists tables in the database.

## Neo4j

* `--prebuilt` value: `neo4j`
* **Environment Variables:**
  * `NEO4J_URI`: The URI of the Neo4j instance (e.g., `bolt://localhost:7687`).
  * `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
  * `NEO4J_USERNAME`: The username for the Neo4j instance.
  * `NEO4J_PASSWORD`: The password for the Neo4j instance.
* **Permissions:**
  * **Database-level permissions** are required to execute Cypher queries.
* **Tools:**
  * `execute_cypher`: Executes a Cypher query.
  * `get_schema`: Retrieves the schema of the Neo4j database.

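To see how these values fit together in practice, here is a minimal launch sketch for the prebuilt Neo4j tools over stdio; the connection values are placeholders, and the `--prebuilt neo4j --stdio` flags mirror the MCP client configurations shown earlier in this diff.

```bash
# Minimal sketch: run Toolbox with the prebuilt Neo4j tools over stdio.
# The NEO4J_* values below are placeholders for your own instance.
export NEO4J_URI="bolt://localhost:7687"
export NEO4J_DATABASE="neo4j"
export NEO4J_USERNAME="neo4j"
export NEO4J_PASSWORD="my-password"
./toolbox --prebuilt neo4j --stdio
```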
@@ -1,44 +0,0 @@
---
title: "YugabyteDB"
type: docs
weight: 1
description: >
  YugabyteDB is a high-performance, distributed SQL database.
---

## About

[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database designed for global, internet-scale applications, with full PostgreSQL compatibility.

[yugabytedb]: https://www.yugabyte.com/

## Example

```yaml
sources:
  my-yb-source:
    kind: yugabytedb
    host: 127.0.0.1
    port: 5433
    database: yugabyte
    user: ${USER_NAME}
    password: ${PASSWORD}
    loadBalance: true
    topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2
```

## Reference

| **field**                    | **type** | **required** | **description**                                                                                                                                        |
|------------------------------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind                         | string   | true         | Must be "yugabytedb".                                                                                                                                  |
| host                         | string   | true         | IP address to connect to.                                                                                                                              |
| port                         | integer  | true         | Port to connect to. The default port is 5433.                                                                                                          |
| database                     | string   | true         | Name of the YugabyteDB database to connect to. The default database name is yugabyte.                                                                  |
| user                         | string   | true         | Name of the YugabyteDB user to connect as. The default user is yugabyte.                                                                               |
| password                     | string   | true         | Password of the YugabyteDB user. The default password is yugabyte.                                                                                     |
| loadBalance                  | boolean  | false        | If true, enable uniform load balancing. The default loadBalance value is false.                                                                        |
| topologyKeys                 | string   | false        | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
| ybServersRefreshInterval     | integer  | false        | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300.          |
| fallbackToTopologyKeysOnly   | boolean  | false        | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By default, this is set to false.                      |
| failedHostReconnectDelaySecs | integer  | false        | Time (in seconds) to wait before trying to connect to failed nodes. The default value is 5.                                                            |
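
For context, a source like `my-yb-source` above is consumed by tool definitions. The sketch below is illustrative only: it pairs the source with the `yugabytedb-sql` tool kind documented later in this diff, and the tool name and statement are made up for the example.

```yaml
tools:
  count_flights:
    kind: yugabytedb-sql
    source: my-yb-source
    description: Returns the total number of rows in the flights table.
    statement: SELECT COUNT(*) FROM flights
```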
@@ -1,7 +0,0 @@
---
title: "Cloud SQL"
type: docs
weight: 1
description: >
  Tools that work with Cloud SQL Control Plane.
---
@@ -1,43 +0,0 @@
---
title: "cloud-sql-wait-for-operation"
type: docs
weight: 10
description: >
  Wait for a long-running Cloud SQL operation to complete.
---

The `cloud-sql-wait-for-operation` tool is a utility tool that waits for a
long-running Cloud SQL operation to complete. It does this by polling the Cloud
SQL Admin API operation status endpoint until the operation is finished, using
exponential backoff.

{{< notice info >}}
This tool is intended for developer assistant workflows with human-in-the-loop
and shouldn't be used for production agents.
{{< /notice >}}

## Example

```yaml
tools:
  cloudsql-operations-get:
    kind: cloud-sql-wait-for-operation
    source: some-http-source
    description: "This will poll on operations API until the operation is done. For checking operation status we need projectId and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup."
    delay: 1s
    maxDelay: 4m
    multiplier: 2
    maxRetries: 10
```

## Reference

| **field**   | **type** | **required** | **description**                                                                                                   |
| ----------- | :------: | :----------: | ----------------------------------------------------------------------------------------------------------------- |
| kind        | string   | true         | Must be "cloud-sql-wait-for-operation".                                                                           |
| source      | string   | true         | The name of an `http` source to use for authentication.                                                           |
| description | string   | true         | A description of the tool.                                                                                        |
| delay       | duration | false        | The initial delay between polling requests (e.g., `3s`). Defaults to 3 seconds.                                   |
| maxDelay    | duration | false        | The maximum delay between polling requests (e.g., `4m`). Defaults to 4 minutes.                                   |
| multiplier  | float    | false        | The multiplier for the polling delay. The delay is multiplied by this value after each request. Defaults to 2.0.  |
| maxRetries  | int      | false        | The maximum number of polling attempts before giving up. Defaults to 10.                                          |
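
As a usage sketch, a tool defined this way can be invoked through the Toolbox HTTP API using the same `/api/tool/<name>/invoke` pattern shown in the firestore-query page later in this diff. The parameter names below (`project`, `operation`) are hypothetical and depend on how the underlying `http` source and tool are actually wired up.

```bash
# Hypothetical invocation sketch; adjust the parameter names to match your tool definition.
curl -X POST http://localhost:5000/api/tool/cloudsql-operations-get/invoke \
  -H "Content-Type: application/json" \
  -d '{"project": "my-project", "operation": "my-operation-id"}'
```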
@@ -1,412 +0,0 @@
|
||||
---
|
||||
title: "firestore-query"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Query a Firestore collection with parameterizable filters and Firestore native JSON value types
|
||||
aliases:
|
||||
- /resources/tools/firestore-query
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The `firestore-query` tool allows you to query Firestore collections with dynamic, parameterizable filters that support Firestore's native JSON value types. This tool is designed for querying a single collection, which is the standard pattern in Firestore. The collection path itself can be parameterized, making it flexible for various use cases. This tool is particularly useful when you need to create reusable query templates with parameters that can be substituted at runtime.
|
||||
|
||||
**Developer Note**: This tool serves as the general querying foundation that developers can use to create custom tools with specific query patterns.
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Parameterizable Queries**: Use Go template syntax to create dynamic queries
|
||||
- **Dynamic Collection Paths**: The collection path can be parameterized for flexibility
|
||||
- **Native JSON Value Types**: Support for Firestore's typed values (stringValue, integerValue, doubleValue, etc.)
|
||||
- **Complex Filter Logic**: Support for AND/OR logical operators in filters
|
||||
- **Template Substitution**: Dynamic collection paths, filters, and ordering
|
||||
- **Query Analysis**: Optional query performance analysis with explain metrics (non-parameterizable)
|
||||
|
||||
## Configuration
|
||||
|
||||
### Basic Configuration
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
query_countries:
|
||||
kind: firestore-query
|
||||
source: my-firestore-source
|
||||
description: Query countries with dynamic filters
|
||||
collectionPath: "countries"
|
||||
filters: |
|
||||
{
|
||||
"field": "continent",
|
||||
"op": "==",
|
||||
"value": {"stringValue": "{{.continent}}"}
|
||||
}
|
||||
parameters:
|
||||
- name: continent
|
||||
type: string
|
||||
description: Continent to filter by
|
||||
required: true
|
||||
```
|
||||
|
||||
### Advanced Configuration with Complex Filters
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
advanced_query:
|
||||
kind: firestore-query
|
||||
source: my-firestore-source
|
||||
description: Advanced query with complex filters
|
||||
collectionPath: "{{.collection}}"
|
||||
filters: |
|
||||
{
|
||||
"or": [
|
||||
{"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
|
||||
{
|
||||
"and": [
|
||||
{"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}},
|
||||
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}},
|
||||
{"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
select:
|
||||
- name
|
||||
- status
|
||||
- priority
|
||||
orderBy:
|
||||
field: "{{.sortField}}"
|
||||
direction: "{{.sortDirection}}"
|
||||
limit: 100
|
||||
analyzeQuery: true
|
||||
parameters:
|
||||
- name: collection
|
||||
type: string
|
||||
description: Collection to query
|
||||
required: true
|
||||
- name: status
|
||||
type: string
|
||||
description: Status to filter by
|
||||
required: true
|
||||
- name: priority
|
||||
type: string
|
||||
description: Minimum priority value
|
||||
required: true
|
||||
- name: maxArea
|
||||
type: float
|
||||
description: Maximum area value
|
||||
required: true
|
||||
- name: isActive
|
||||
type: boolean
|
||||
description: Filter by active status
|
||||
required: true
|
||||
- name: sortField
|
||||
type: string
|
||||
description: Field to sort by
|
||||
required: false
|
||||
default: "createdAt"
|
||||
- name: sortDirection
|
||||
type: string
|
||||
description: Sort direction (ASCENDING or DESCENDING)
|
||||
required: false
|
||||
default: "DESCENDING"
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
### Configuration Parameters
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
|-----------|------|----------|-------------|
|
||||
| `kind` | string | Yes | Must be `firestore-query` |
|
||||
| `source` | string | Yes | Name of the Firestore source to use |
|
||||
| `description` | string | Yes | Description of what this tool does |
|
||||
| `collectionPath` | string | Yes | Path to the collection to query (supports templates) |
|
||||
| `filters` | string | No | JSON string defining query filters (supports templates) |
|
||||
| `select` | array | No | Fields to select from documents (supports templates - string or array) |
| `orderBy` | object | No | Ordering configuration with `field` and `direction` (supports templates for the value of field or direction) |
|
||||
| `limit` | integer | No | Maximum number of documents to return (default: 100) (supports templates) |
|
||||
| `analyzeQuery` | boolean | No | Whether to analyze query performance (default: false) |
|
||||
| `parameters` | array | Yes | Parameter definitions for template substitution |
|
||||
|
||||
### Runtime Parameters
|
||||
|
||||
Runtime parameters are defined in the `parameters` array and can be used in templates throughout the configuration.
|
||||
|
||||
## Filter Format
|
||||
|
||||
### Simple Filter
|
||||
```json
|
||||
{
|
||||
"field": "age",
|
||||
"op": ">",
|
||||
"value": {"integerValue": "25"}
|
||||
}
|
||||
```
|
||||
|
||||
### AND Filter
|
||||
```json
|
||||
{
|
||||
"and": [
|
||||
{"field": "status", "op": "==", "value": {"stringValue": "active"}},
|
||||
{"field": "age", "op": ">=", "value": {"integerValue": "18"}}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### OR Filter
|
||||
```json
|
||||
{
|
||||
"or": [
|
||||
{"field": "role", "op": "==", "value": {"stringValue": "admin"}},
|
||||
{"field": "role", "op": "==", "value": {"stringValue": "moderator"}}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Nested Filters
|
||||
```json
|
||||
{
|
||||
"or": [
|
||||
{"field": "type", "op": "==", "value": {"stringValue": "premium"}},
|
||||
{
|
||||
"and": [
|
||||
{"field": "type", "op": "==", "value": {"stringValue": "standard"}},
|
||||
{"field": "credits", "op": ">", "value": {"integerValue": "1000"}}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Firestore Native Value Types
|
||||
|
||||
The tool supports all Firestore native JSON value types:
|
||||
|
||||
| Type | Format | Example |
|
||||
|------|--------|---------|
|
||||
| String | `{"stringValue": "text"}` | `{"stringValue": "{{.name}}"}` |
|
||||
| Integer | `{"integerValue": "123"}` or `{"integerValue": 123}` | `{"integerValue": "{{.age}}"}` or `{"integerValue": {{.age}}}` |
|
||||
| Double | `{"doubleValue": 45.67}` | `{"doubleValue": {{.price}}}` |
|
||||
| Boolean | `{"booleanValue": true}` | `{"booleanValue": {{.active}}}` |
|
||||
| Null | `{"nullValue": null}` | `{"nullValue": null}` |
|
||||
| Timestamp | `{"timestampValue": "RFC3339"}` | `{"timestampValue": "{{.date}}"}` |
|
||||
| GeoPoint | `{"geoPointValue": {"latitude": 0, "longitude": 0}}` | See below |
|
||||
| Array | `{"arrayValue": {"values": [...]}}` | See below |
|
||||
| Map | `{"mapValue": {"fields": {...}}}` | See below |
|
||||
|
||||
### Complex Type Examples
|
||||
|
||||
**GeoPoint:**
|
||||
```json
|
||||
{
|
||||
"field": "location",
|
||||
"op": "==",
|
||||
"value": {
|
||||
"geoPointValue": {
|
||||
"latitude": 37.7749,
|
||||
"longitude": -122.4194
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Array:**
|
||||
```json
|
||||
{
|
||||
"field": "tags",
|
||||
"op": "array-contains",
|
||||
"value": {"stringValue": "{{.tag}}"}
|
||||
}
|
||||
```
|
||||
|
||||
## Supported Operators
|
||||
|
||||
- `<` - Less than
|
||||
- `<=` - Less than or equal
|
||||
- `>` - Greater than
|
||||
- `>=` - Greater than or equal
|
||||
- `==` - Equal
|
||||
- `!=` - Not equal
|
||||
- `array-contains` - Array contains value
|
||||
- `array-contains-any` - Array contains any of the values
|
||||
- `in` - Value is in array
|
||||
- `not-in` - Value is not in array
|
||||
|
||||
## Examples
|
||||
|
||||
### Example 1: Query with Dynamic Collection Path
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
user_documents:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Query user-specific documents
|
||||
collectionPath: "users/{{.userId}}/documents"
|
||||
filters: |
|
||||
{
|
||||
"field": "type",
|
||||
"op": "==",
|
||||
"value": {"stringValue": "{{.docType}}"}
|
||||
}
|
||||
parameters:
|
||||
- name: userId
|
||||
type: string
|
||||
description: User ID
|
||||
required: true
|
||||
- name: docType
|
||||
type: string
|
||||
description: Document type to filter
|
||||
required: true
|
||||
```
|
||||
|
||||
### Example 2: Complex Geographic Query
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
location_search:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Search locations by area and population
|
||||
collectionPath: "cities"
|
||||
filters: |
|
||||
{
|
||||
"and": [
|
||||
{"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}},
|
||||
{"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}},
|
||||
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}}
|
||||
]
|
||||
}
|
||||
orderBy:
|
||||
field: "population"
|
||||
direction: "DESCENDING"
|
||||
limit: 50
|
||||
parameters:
|
||||
- name: country
|
||||
type: string
|
||||
description: Country code
|
||||
required: true
|
||||
- name: minPopulation
|
||||
type: string
|
||||
description: Minimum population (as string for large numbers)
|
||||
required: true
|
||||
- name: maxArea
|
||||
type: float
|
||||
description: Maximum area in square kilometers
|
||||
required: true
|
||||
```
|
||||
|
||||
### Example 3: Time-based Query with Analysis
|
||||
|
||||
```yaml
|
||||
tools:
|
||||
activity_log:
|
||||
kind: firestore-query
|
||||
source: my-firestore
|
||||
description: Query activity logs within time range
|
||||
collectionPath: "logs"
|
||||
filters: |
|
||||
{
|
||||
"and": [
|
||||
{"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}},
|
||||
{"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}},
|
||||
{"field": "severity", "op": "in", "value": {"arrayValue": {"values": [
|
||||
{"stringValue": "ERROR"},
|
||||
{"stringValue": "CRITICAL"}
|
||||
]}}}
|
||||
]
|
||||
}
|
||||
select:
|
||||
- timestamp
|
||||
- message
|
||||
- severity
|
||||
- userId
|
||||
orderBy:
|
||||
field: "timestamp"
|
||||
direction: "DESCENDING"
|
||||
analyzeQuery: true
|
||||
parameters:
|
||||
- name: startTime
|
||||
type: string
|
||||
description: Start time in RFC3339 format
|
||||
required: true
|
||||
- name: endTime
|
||||
type: string
|
||||
description: End time in RFC3339 format
|
||||
required: true
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Invoking the Tool
|
||||
|
||||
```bash
|
||||
# Using curl
|
||||
curl -X POST http://localhost:5000/api/tool/your-tool-name/invoke \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"continent": "Europe",
|
||||
"minPopulation": "1000000",
|
||||
"maxArea": 500000.5,
|
||||
"isActive": true
|
||||
}'
|
||||
```
|
||||
|
||||
### Response Format
|
||||
|
||||
**Without analyzeQuery:**
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": "doc1",
|
||||
"path": "countries/doc1",
|
||||
"data": {
|
||||
"name": "France",
|
||||
"continent": "Europe",
|
||||
"population": 67000000,
|
||||
"area": 551695
|
||||
},
|
||||
"createTime": "2024-01-01T00:00:00Z",
|
||||
"updateTime": "2024-01-15T10:30:00Z"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
**With analyzeQuery:**
|
||||
```json
|
||||
{
|
||||
"documents": [...],
|
||||
"explainMetrics": {
|
||||
"planSummary": {
|
||||
"indexesUsed": [...]
|
||||
},
|
||||
"executionStats": {
|
||||
"resultsReturned": 10,
|
||||
"executionDuration": "15ms",
|
||||
"readOperations": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use Typed Values**: Always use Firestore's native JSON value types for proper type handling
|
||||
2. **String Numbers for Large Integers**: Use string representation for large integers to avoid precision loss
|
||||
3. **Template Security**: Validate all template parameters to prevent injection attacks
|
||||
4. **Index Optimization**: Use `analyzeQuery` to identify missing indexes
|
||||
5. **Limit Results**: Always set a reasonable `limit` to prevent excessive data retrieval
|
||||
6. **Field Selection**: Use `select` to retrieve only necessary fields
|
||||
|
||||
## Technical Notes
|
||||
|
||||
- Queries operate on a single collection (the standard Firestore pattern)
|
||||
- Maximum of 100 filters per query (configurable)
|
||||
- Template parameters must be properly escaped in JSON contexts
|
||||
- Complex nested queries may require composite indexes
|
||||
|
||||
## See Also
|
||||
|
||||
- [firestore-query-collection](firestore-query-collection.md) - Non-parameterizable query tool
|
||||
- [Firestore Source Configuration](../../sources/firestore.md)
|
||||
- [Firestore Query Documentation](https://firebase.google.com/docs/firestore/query-data/queries)
|
||||
@@ -1,101 +0,0 @@
---
title: "yugabytedb-sql"
type: docs
weight: 1
description: >
  A "yugabytedb-sql" tool executes a pre-defined SQL statement against a YugabyteDB
  database.
---

## About

A `yugabytedb-sql` tool executes a pre-defined SQL statement against a YugabyteDB
database.

The specified SQL statement is executed as a prepared statement,
and specified parameters will be inserted according to their position: e.g. `$1`
will be the first parameter specified, `$2` will be the second parameter, and so
on. If template parameters are included, they will be resolved before execution
of the prepared statement.

## Example

> **Note:** This tool uses parameterized queries to prevent SQL injections.
> Query parameters can be used as substitutes for arbitrary expressions.
> Parameters cannot be used as substitutes for identifiers, column names, table
> names, or other parts of the query.

```yaml
tools:
  search_flights_by_number:
    kind: yugabytedb-sql
    source: my-yb-instance
    statement: |
      SELECT * FROM flights
      WHERE airline = $1
      AND flight_number = $2
      LIMIT 10
    description: |
      Use this tool to get information for a specific flight.
      Takes an airline code and flight number and returns info on the flight.
      Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number.
      An airline code is a code for an airline service consisting of a two-character
      airline designator followed by a flight number, which is a 1 to 4 digit number.
      For example, if given CY 0123, the airline is "CY", and flight_number is "123".
      Another example for this is DL 1234, the airline is "DL", and flight_number is "1234".
      If the tool returns more than one option choose the date closest to today.
      Example:
      {{
        "airline": "CY",
        "flight_number": "888",
      }}
      Example:
      {{
        "airline": "DL",
        "flight_number": "1234",
      }}
    parameters:
      - name: airline
        type: string
        description: Airline unique 2 letter identifier
      - name: flight_number
        type: string
        description: 1 to 4 digit number
```

### Example with Template Parameters

> **Note:** This tool allows direct modifications to the SQL statement, including identifiers, column names,
> and table names. **This makes it more vulnerable to SQL injections**. Using basic parameters
> only (see above) is recommended for performance and safety reasons. For more details, please
> check [templateParameters](_index#template-parameters).

```yaml
tools:
  list_table:
    kind: yugabytedb-sql
    source: my-yb-instance
    statement: |
      SELECT * FROM {{.tableName}}
    description: |
      Use this tool to list all information from a specific table.
      Example:
      {{
        "tableName": "flights",
      }}
    templateParameters:
      - name: tableName
        type: string
        description: Table to select from
```

## Reference

| **field**          | **type**                                         | **required** | **description**                                                                                                                             |
|--------------------|:------------------------------------------------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------|
| kind               | string                                           | true         | Must be "yugabytedb-sql".                                                                                                                   |
| source             | string                                           | true         | Name of the source the SQL should execute on.                                                                                               |
| description        | string                                           | true         | Description of the tool that is passed to the LLM.                                                                                          |
| statement          | string                                           | true         | SQL statement to execute.                                                                                                                   |
| parameters         | [parameters](_index#specifying-parameters)       | false        | List of [parameters](_index#specifying-parameters) that will be inserted into the SQL statement.                                            |
| templateParameters | [templateParameters](_index#template-parameters) | false        | List of [templateParameters](_index#template-parameters) that will be inserted into the SQL statement before executing prepared statement.  |
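
As a usage sketch, the `search_flights_by_number` example above could be called through the Toolbox HTTP API using the `/api/tool/<name>/invoke` pattern shown in the firestore-query page earlier in this diff; the host and port assume a locally running server.

```bash
# Sketch: invoke the example tool with its two declared parameters.
curl -X POST http://localhost:5000/api/tool/search_flights_by_number/invoke \
  -H "Content-Type: application/json" \
  -d '{"airline": "CY", "flight_number": "888"}'
```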
@@ -79,7 +79,7 @@
|
||||
"# @markdown Please fill in the value below with your GCP project ID and then run the cell.\n",
|
||||
"\n",
|
||||
"# Please fill in these values.\n",
|
||||
"project_id = \"\" # @param {type:\"string\"}\n",
|
||||
"project_id = \"project-id\" # @param {type:\"string\"}\n",
|
||||
"\n",
|
||||
"# Quick input validations.\n",
|
||||
"assert project_id, \"⚠️ Please provide a Google Cloud project ID\"\n",
|
||||
@@ -767,7 +767,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.14.0\" # x-release-please-version\n",
|
||||
"version = \"0.11.0\" # x-release-please-version\n",
|
||||
"! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -114,7 +114,7 @@ In this section, we will download and install the Toolbox binary.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
export VERSION="0.14.0"
|
||||
export VERSION="0.11.0"
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -220,7 +220,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.14.0\" # x-release-please-version\n",
|
||||
"version = \"0.13.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.13.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.12.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
|
||||
11
go.mod
@@ -2,11 +2,11 @@ module github.com/googleapis/genai-toolbox
|
||||
|
||||
go 1.24.0
|
||||
|
||||
toolchain go1.25.1
|
||||
toolchain go1.25.0
|
||||
|
||||
require (
|
||||
cloud.google.com/go/alloydbconn v1.15.5
|
||||
cloud.google.com/go/bigquery v1.70.0
|
||||
cloud.google.com/go/bigquery v1.69.0
|
||||
cloud.google.com/go/bigtable v1.39.0
|
||||
cloud.google.com/go/cloudsqlconn v1.18.1
|
||||
cloud.google.com/go/dataplex v1.26.0
|
||||
@@ -28,18 +28,17 @@ require (
|
||||
github.com/goccy/go-yaml v1.18.0
|
||||
github.com/google/go-cmp v0.7.0
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/jackc/pgx/v5 v5.7.6
|
||||
github.com/jackc/pgx/v5 v5.7.5
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.10
|
||||
github.com/microsoft/go-mssqldb v1.9.3
|
||||
github.com/nakagami/firebirdsql v0.9.15
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.3
|
||||
github.com/redis/go-redis/v9 v9.13.0
|
||||
github.com/redis/go-redis/v9 v9.12.1
|
||||
github.com/spf13/cobra v1.9.1
|
||||
github.com/thlib/go-timezone-local v0.0.7
|
||||
github.com/trinodb/trino-go-client v0.328.0
|
||||
github.com/valkey-io/valkey-go v1.0.64
|
||||
github.com/yugabyte/pgx/v5 v5.5.3-yb-5
|
||||
go.mongodb.org/mongo-driver v1.17.4
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
|
||||
go.opentelemetry.io/otel v1.37.0
|
||||
@@ -49,7 +48,7 @@ require (
|
||||
go.opentelemetry.io/otel/sdk v1.37.0
|
||||
go.opentelemetry.io/otel/sdk/metric v1.37.0
|
||||
go.opentelemetry.io/otel/trace v1.37.0
|
||||
golang.org/x/oauth2 v0.31.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
google.golang.org/api v0.248.0
|
||||
google.golang.org/genproto v0.0.0-20250826171959-ef028d996bc1
|
||||
modernc.org/sqlite v1.38.2
|
||||
|
||||
18
go.sum
@@ -137,8 +137,8 @@ cloud.google.com/go/bigquery v1.47.0/go.mod h1:sA9XOgy0A8vQK9+MWhEQTY6Tix87M/Zur
|
||||
cloud.google.com/go/bigquery v1.48.0/go.mod h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm0Nf1fysJac=
|
||||
cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9yBh7Oy7/4Q=
|
||||
cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU=
|
||||
cloud.google.com/go/bigquery v1.70.0 h1:V1OIhhOSionCOXWMmypXOvZu/ogkzosa7s1ArWJO/Yg=
|
||||
cloud.google.com/go/bigquery v1.70.0/go.mod h1:6lEAkgTJN+H2JcaX1eKiuEHTKyqBaJq5U3SpLGbSvwI=
|
||||
cloud.google.com/go/bigquery v1.69.0 h1:rZvHnjSUs5sHK3F9awiuFk2PeOaB8suqNuim21GbaTc=
|
||||
cloud.google.com/go/bigquery v1.69.0/go.mod h1:TdGLquA3h/mGg+McX+GsqG9afAzTAcldMjqhdjHTLew=
|
||||
cloud.google.com/go/bigtable v1.39.0 h1:NF0aaSend+Z5CKND2vWY9fgDwaeZ4bDgzUdgw8rk75Y=
|
||||
cloud.google.com/go/bigtable v1.39.0/go.mod h1:zgL2Vxux9Bx+TcARDJDUxVyE+BCUfP2u4Zm9qeHF+g0=
|
||||
cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY=
|
||||
@@ -1069,8 +1069,8 @@ github.com/jackc/pgtype v1.14.4 h1:fKuNiCumbKTAIxQwXfB/nsrnkEI6bPJrrSiMKgbJ2j8=
|
||||
github.com/jackc/pgtype v1.14.4/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA=
|
||||
github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA=
|
||||
github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw=
|
||||
github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
|
||||
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
||||
github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs=
|
||||
github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
||||
github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0=
|
||||
github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
@@ -1195,8 +1195,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
|
||||
github.com/redis/go-redis/v9 v9.13.0 h1:PpmlVykE0ODh8P43U0HqC+2NXHXwG+GUtQyz+MPKGRg=
|
||||
github.com/redis/go-redis/v9 v9.13.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/redis/go-redis/v9 v9.12.1 h1:k5iquqv27aBtnTm2tIkROUDp8JBXhXZIVu1InSgvovg=
|
||||
github.com/redis/go-redis/v9 v9.12.1/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
@@ -1270,8 +1270,6 @@ github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3i
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yugabyte/pgx/v5 v5.5.3-yb-5 h1:MV66FoH4HFsA9IC+h1hRY/+9Rmo040zVyZovOX7zpuk=
|
||||
github.com/yugabyte/pgx/v5 v5.5.3-yb-5/go.mod h1:2SxizGfDY7UDCRTtbI/xd98C/oGN7S/3YoGF8l9gx/c=
|
||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
@@ -1523,8 +1521,8 @@ golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec
|
||||
golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I=
|
||||
golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw=
|
||||
golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4=
|
||||
golang.org/x/oauth2 v0.31.0 h1:8Fq0yVZLh4j4YA47vHKFTa9Ew5XIrCP8LC6UeNZnLxo=
|
||||
golang.org/x/oauth2 v0.31.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
|
||||
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
|
||||
@@ -33,7 +33,6 @@ var expectedToolSources = []string{
|
||||
"looker",
|
||||
"mssql",
|
||||
"mysql",
|
||||
"neo4j",
|
||||
"oceanbase",
|
||||
"postgres",
|
||||
"spanner-postgres",
|
||||
@@ -99,7 +98,6 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
postgresconfig, _ := Get("postgres")
|
||||
spanner_config, _ := Get("spanner")
|
||||
spannerpg_config, _ := Get("spanner-postgres")
|
||||
neo4jconfig, _ := Get("neo4j")
|
||||
if len(alloydb_admin_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
}
|
||||
@@ -145,9 +143,6 @@ func TestGetPrebuiltTool(t *testing.T) {
|
||||
if len(spannerpg_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch spanner pg prebuilt tools yaml")
|
||||
}
|
||||
if len(neo4jconfig) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch neo4j prebuilt tools yaml")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailGetPrebuiltTool(t *testing.T) {
|
||||
|
||||
@@ -1,16 +1,3 @@
|
||||
# Copyright 2025 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
sources:
|
||||
alloydb-pg-source:
|
||||
kind: "alloydb-postgres"
|
||||
@@ -19,9 +6,8 @@ sources:
|
||||
cluster: ${ALLOYDB_POSTGRES_CLUSTER}
|
||||
instance: ${ALLOYDB_POSTGRES_INSTANCE}
|
||||
database: ${ALLOYDB_POSTGRES_DATABASE}
|
||||
user: ${ALLOYDB_POSTGRES_USER:}
|
||||
password: ${ALLOYDB_POSTGRES_PASSWORD:}
|
||||
ipType: ${ALLOYDB_POSTGRES_IP_TYPE:public}
|
||||
user: ${ALLOYDB_POSTGRES_USER}
|
||||
password: ${ALLOYDB_POSTGRES_PASSWORD}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
@@ -106,7 +92,7 @@ tools:
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
)
|
||||
)
|
||||
END AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
|
||||
@@ -1,21 +1,7 @@
|
||||
# Copyright 2025 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
sources:
|
||||
bigquery-source:
|
||||
kind: "bigquery"
|
||||
project: ${BIGQUERY_PROJECT}
|
||||
location: ${BIGQUERY_LOCATION:}
|
||||
|
||||
tools:
|
||||
ask_data_insights:
|
||||
|
||||
@@ -1,16 +1,3 @@
|
||||
# Copyright 2025 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
sources:
|
||||
cloudsql-mssql-source:
|
||||
kind: cloud-sql-mssql
|
||||
@@ -21,7 +8,6 @@ sources:
|
||||
ipAddress: ${CLOUD_SQL_MSSQL_IP_ADDRESS}
|
||||
user: ${CLOUD_SQL_MSSQL_USER}
|
||||
password: ${CLOUD_SQL_MSSQL_PASSWORD}
|
||||
ipType: ${CLOUD_SQL_MSSQL_IP_TYPE:public}
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: mssql-execute-sql
|
||||
|
||||
@@ -1,16 +1,3 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
cloudsql-pg-source:
kind: cloud-sql-postgres
@@ -18,9 +5,8 @@ sources:
region: ${CLOUD_SQL_POSTGRES_REGION}
instance: ${CLOUD_SQL_POSTGRES_INSTANCE}
database: ${CLOUD_SQL_POSTGRES_DATABASE}
user: ${CLOUD_SQL_POSTGRES_USER:}
password: ${CLOUD_SQL_POSTGRES_PASSWORD:}
ipType: ${CLOUD_SQL_POSTGRES_IP_TYPE:public}
user: ${CLOUD_SQL_POSTGRES_USER}
password: ${CLOUD_SQL_POSTGRES_PASSWORD}

tools:
execute_sql:
@@ -105,7 +91,7 @@ tools:
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:

@@ -1,21 +1,8 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
firestore-source:
kind: firestore
project: ${FIRESTORE_PROJECT}
database: ${FIRESTORE_DATABASE:}
database: ${FIRESTORE_DATABASE}

tools:
firestore-get-documents:
@@ -26,26 +13,26 @@ tools:
kind: firestore-add-documents
source: firestore-source
description: |
Adds a new document to a Firestore collection. Please follow the best practices :
1. Always use typed values in the documentData: Every field must be wrapped with its appropriate type indicator (e.g., {"stringValue": "text"})
2. Integer values can be strings in the documentData: The tool accepts integer values as strings (e.g., {"integerValue": "1500"})
3. Use returnData sparingly: Only set to true when you need to verify the exact data that was written
4. Validate data before sending: Ensure your data matches Firestore's native JSON format
5. Handle timestamps properly: Use RFC3339 format for timestamp strings
6. Base64 encode binary data: Binary data must be base64 encoded in the bytesValue field
7. Consider security rules: Ensure your Firestore security rules allow document creation in the target collection
Adds a new document to a Firestore collection. Please follow the best practices :
1. Always use typed values in the documentData: Every field must be wrapped with its appropriate type indicator (e.g., {"stringValue": "text"})
2. Integer values can be strings in the documentData: The tool accepts integer values as strings (e.g., {"integerValue": "1500"})
3. Use returnData sparingly: Only set to true when you need to verify the exact data that was written
4. Validate data before sending: Ensure your data matches Firestore's native JSON format
5. Handle timestamps properly: Use RFC3339 format for timestamp strings
6. Base64 encode binary data: Binary data must be base64 encoded in the bytesValue field
7. Consider security rules: Ensure your Firestore security rules allow document creation in the target collection
firestore-update-document:
kind: firestore-update-document
source: firestore-source
description: |
Updates an existing document in Firestore. Supports both full document updates and selective field updates using an update mask. Please follow the best practices:
1. Use update masks for precision: When you only need to update specific fields, use the updateMask parameter to avoid unintended changes
2. Always use typed values in the documentData: Every field must be wrapped with its appropriate type indicator (e.g., {"stringValue": "text"})
3. Delete fields using update mask: To delete fields, include them in the updateMask but omit them from documentData
4. Integer values can be strings: The tool accepts integer values as strings (e.g., {"integerValue": "1500"})
5. Use returnData sparingly: Only set to true when you need to verify the exact data after the update
6. Handle timestamps properly: Use RFC3339 format for timestamp strings
7. Consider security rules: Ensure your Firestore security rules allow document updates
Updates an existing document in Firestore. Supports both full document updates and selective field updates using an update mask. Please follow the best practices:
1. Use update masks for precision: When you only need to update specific fields, use the updateMask parameter to avoid unintended changes
2. Always use typed values in the documentData: Every field must be wrapped with its appropriate type indicator (e.g., {"stringValue": "text"})
3. Delete fields using update mask: To delete fields, include them in the updateMask but omit them from documentData
4. Integer values can be strings: The tool accepts integer values as strings (e.g., {"integerValue": "1500"})
5. Use returnData sparingly: Only set to true when you need to verify the exact data after the update
6. Handle timestamps properly: Use RFC3339 format for timestamp strings
7. Consider security rules: Ensure your Firestore security rules allow document updates
firestore-list-collections:
kind: firestore-list-collections
source: firestore-source
@@ -57,8 +44,8 @@ tools:
firestore-query-collection:
kind: firestore-query-collection
source: firestore-source
description: |
Retrieves one or more Firestore documents from a collection in a database in the current project by a collection with a full document path.
description: |
Retrieves one or more Firestore documents from a collection in a database in the current project by a collection with a full document path.
Use this if you know the exact path of a collection and the filtering clause you would like for the document.
firestore-get-rules:
kind: firestore-get-rules

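The best-practice notes above refer to Firestore's native JSON value format, where every field in `documentData` is wrapped in a type indicator. A small hedged sketch of such a payload built in Go; the field names and values are illustrative only, not part of the config above:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// buildDocumentData assembles a documentData payload in the typed-value form
// described above: each field wrapped in a type indicator such as stringValue,
// integerValue (which may be passed as a string), timestampValue (RFC3339),
// or bytesValue (base64).
func buildDocumentData() map[string]any {
	return map[string]any{
		"title":     map[string]any{"stringValue": "Invoice #42"},
		"amount":    map[string]any{"integerValue": "1500"},
		"createdAt": map[string]any{"timestampValue": "2025-01-01T00:00:00Z"},
		"signature": map[string]any{"bytesValue": "aGVsbG8="},
	}
}

func main() {
	b, err := json.MarshalIndent(buildDocumentData(), "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```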
@@ -1,17 +1,3 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
mysql-source:
kind: mysql
@@ -20,11 +6,6 @@ sources:
database: ${MYSQL_DATABASE}
user: ${MYSQL_USER}
password: ${MYSQL_PASSWORD}
# Optional: supply additional DSN parameters (e.g. TLS, charset) via env var.
# Provide a YAML-encoded map in MYSQL_QUERY_PARAMS, for example:
# export MYSQL_QUERY_PARAMS="{tls: preferred, charset: utf8mb4}"
# When the variable is empty/undefined, queryParams will be treated as nil.
queryParams: ${MYSQL_QUERY_PARAMS:}
queryTimeout: 30s # Optional
tools:
execute_sql:

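The comments above describe passing extra DSN options as a YAML-encoded map in `MYSQL_QUERY_PARAMS`. A rough sketch of how such a value could be decoded and appended to a go-sql-driver-style DSN; the DSN shape and helper name are assumptions for illustration, not the toolbox's actual implementation:

```go
package main

import (
	"fmt"
	"net/url"
	"os"

	"github.com/goccy/go-yaml"
)

// queryParamsFromEnv decodes a YAML map such as "{tls: preferred, charset: utf8mb4}".
// An empty or unset variable yields a nil map, matching the comment above.
func queryParamsFromEnv() (map[string]string, error) {
	raw := os.Getenv("MYSQL_QUERY_PARAMS")
	if raw == "" {
		return nil, nil
	}
	params := map[string]string{}
	if err := yaml.Unmarshal([]byte(raw), &params); err != nil {
		return nil, fmt.Errorf("invalid MYSQL_QUERY_PARAMS: %w", err)
	}
	return params, nil
}

func main() {
	params, err := queryParamsFromEnv()
	if err != nil {
		panic(err)
	}
	q := url.Values{}
	for k, v := range params {
		q.Set(k, v)
	}
	dsn := "user:pass@tcp(127.0.0.1:3306)/mydb" // placeholder credentials
	if enc := q.Encode(); enc != "" {
		dsn += "?" + enc
	}
	fmt.Println(dsn)
}
```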
@@ -1,37 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
neo4j-source:
kind: neo4j
uri: ${NEO4J_URI}
database: ${NEO4J_DATABASE}
user: ${NEO4J_USERNAME}
password: ${NEO4J_PASSWORD}

tools:
execute_cypher:
kind: neo4j-execute-cypher
source: neo4j-source
description: Use this tool to execute Cypher queries.

get_schema:
kind: neo4j-schema
source: neo4j-source
description: Use this tool to get the database schema.

toolsets:
neo4j-database-tools:
- execute_cypher
- get_schema
@@ -1,16 +1,3 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
postgresql-source:
kind: postgres
@@ -19,7 +6,6 @@ sources:
database: ${POSTGRES_DATABASE}
user: ${POSTGRES_USER}
password: ${POSTGRES_PASSWORD}
queryParams: ${POSTGRES_QUERY_PARAMS:}

tools:
execute_sql:
@@ -104,7 +90,7 @@ tools:
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:

@@ -21,6 +21,7 @@ import (
bigqueryapi "cloud.google.com/go/bigquery"
"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/internal/util"
"go.opentelemetry.io/otel/trace"
"golang.org/x/oauth2"
@@ -34,7 +35,7 @@ const SourceKind string = "bigquery"
// validate interface
var _ sources.SourceConfig = Config{}

type BigqueryClientCreator func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)
type BigqueryClientCreator func(tokenString tools.AccessToken) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)

func init() {
if !sources.Register(SourceKind, newConfig) {
@@ -87,8 +88,6 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
s := &Source{
Name: r.Name,
Kind: SourceKind,
Project: r.Project,
Location: r.Location,
Client: client,
RestService: restService,
TokenSource: tokenSource,
@@ -106,8 +105,6 @@ type Source struct {
// BigQuery Google SQL struct with client
Name string `yaml:"name"`
Kind string `yaml:"kind"`
Project string
Location string
Client *bigqueryapi.Client
RestService *bigqueryrestapi.Service
TokenSource oauth2.TokenSource
@@ -133,14 +130,6 @@ func (s *Source) UseClientAuthorization() bool {
return s.UseClientOAuth
}

func (s *Source) BigQueryProject() string {
return s.Project
}

func (s *Source) BigQueryLocation() string {
return s.Location
}

func (s *Source) BigQueryTokenSource() oauth2.TokenSource {
return s.TokenSource
}
@@ -198,8 +187,7 @@ func initBigQueryConnectionWithOAuthToken(
location string,
name string,
userAgent string,
tokenString string,
wantRestService bool,
tokenString tools.AccessToken,
) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
defer span.End()
@@ -216,16 +204,13 @@ func initBigQueryConnectionWithOAuthToken(
}
client.Location = location

if wantRestService {
// Initialize the low-level BigQuery REST service using the same credentials
restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
if err != nil {
return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
}
return client, restService, nil
// Initialize the low-level BigQuery REST service using the same credentials
restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
if err != nil {
return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
}

return client, nil, nil
return client, restService, nil
}

// newBigQueryClientCreator sets the project parameters for the init helper
@@ -237,13 +222,13 @@ func newBigQueryClientCreator(
project string,
location string,
name string,
) (func(string, bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error), error) {
) (func(tools.AccessToken) (*bigqueryapi.Client, *bigqueryrestapi.Service, error), error) {
userAgent, err := util.UserAgentFromContext(ctx)
if err != nil {
return nil, err
}

return func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
return initBigQueryConnectionWithOAuthToken(ctx, tracer, project, location, name, userAgent, tokenString, wantRestService)
return func(tokenString tools.AccessToken) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
return initBigQueryConnectionWithOAuthToken(ctx, tracer, project, location, name, userAgent, tokenString)
}, nil
}

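The hunks above collapse the creator signature from `(tokenString string, wantRestService bool)` to a single `tools.AccessToken` argument, with the REST service always initialized alongside the client. A hedged sketch of how a caller might consume a creator with the new shape; the helper name and the import path assumed for the BigQuery v2 REST package are illustrative:

```go
package example

import (
	"context"
	"fmt"

	bigqueryapi "cloud.google.com/go/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	bigqueryrestapi "google.golang.org/api/bigquery/v2"
)

// creatorFunc mirrors the reworked BigqueryClientCreator signature above.
type creatorFunc func(token tools.AccessToken) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)

// runQueryWithToken is an illustrative caller: it builds a per-request client
// from a client-supplied access token and runs one query with it.
func runQueryWithToken(ctx context.Context, create creatorFunc, token tools.AccessToken, sql string) error {
	client, restService, err := create(token)
	if err != nil {
		return fmt.Errorf("error creating client from OAuth access token: %w", err)
	}
	defer client.Close()
	_ = restService // available for dry runs and other v2 REST calls

	q := client.Query(sql)
	q.Location = client.Location
	if _, err := q.Read(ctx); err != nil {
		return fmt.Errorf("query failed: %w", err)
	}
	return nil
}
```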
@@ -1,127 +0,0 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yugabytedb

import (
"context"
"fmt"

"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/yugabyte/pgx/v5/pgxpool"
"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "yugabytedb"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Host string `yaml:"host" validate:"required"`
Port string `yaml:"port" validate:"required"`
User string `yaml:"user" validate:"required"`
Password string `yaml:"password" validate:"required"`
Database string `yaml:"database" validate:"required"`
LoadBalance string `yaml:"loadBalance"`
TopologyKeys string `yaml:"topologyKeys"`
YBServersRefreshInterval string `yaml:"ybServersRefreshInterval"`
FallBackToTopologyKeysOnly string `yaml:"fallbackToTopologyKeysOnly"`
FailedHostReconnectDelaySeconds string `yaml:"failedHostReconnectDelaySecs"`
}

func (r Config) SourceConfigKind() string {
return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
pool, err := initYugabyteDBConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.LoadBalance, r.TopologyKeys, r.YBServersRefreshInterval, r.FallBackToTopologyKeysOnly, r.FailedHostReconnectDelaySeconds)
if err != nil {
return nil, fmt.Errorf("unable to create pool: %w", err)
}

err = pool.Ping(ctx)
if err != nil {
return nil, fmt.Errorf("unable to connect successfully: %w", err)
}

s := &Source{
Name: r.Name,
Kind: SourceKind,
Pool: pool,
}
return s, nil
}

var _ sources.Source = &Source{}

type Source struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
Pool *pgxpool.Pool
}

func (s *Source) SourceKind() string {
return SourceKind
}

func (s *Source) YugabyteDBPool() *pgxpool.Pool {
return s.Pool
}

func initYugabyteDBConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname, loadBalance, topologyKeys, refreshInterval, explicitFallback, failedHostTTL string) (*pgxpool.Pool, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
defer span.End()
// urlExample := "postgres://username:password@localhost:5433/database_name"
i := fmt.Sprintf("postgres://%s:%s@%s:%s/%s", user, pass, host, port, dbname)
if loadBalance == "true" {
i = fmt.Sprintf("%s?load_balance=%s", i, loadBalance)
if topologyKeys != "" {
i = fmt.Sprintf("%s&topology_keys=%s", i, topologyKeys)
if explicitFallback == "true" {
i = fmt.Sprintf("%s&fallback_to_topology_keys_only=%s", i, explicitFallback)
}
}
if refreshInterval != "" {
i = fmt.Sprintf("%s&yb_servers_refresh_interval=%s", i, refreshInterval)
}
if failedHostTTL != "" {
i = fmt.Sprintf("%s&failed_host_reconnect_delay_secs=%s", i, failedHostTTL)
}
}
pool, err := pgxpool.New(ctx, i)
if err != nil {
return nil, fmt.Errorf("unable to create connection pool: %w", err)
}

return pool, nil
}
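For reference, the removed `initYugabyteDBConnectionPool` above concatenates the YugabyteDB smart-driver options onto a standard postgres URL. A minimal sketch of the resulting connection string and pool creation, assuming placeholder host, credentials, and topology keys:

```go
package main

import (
	"context"
	"fmt"

	"github.com/yugabyte/pgx/v5/pgxpool"
)

func main() {
	ctx := context.Background()

	// Shape produced when loadBalance is "true" and topology keys are set;
	// every value below is a placeholder.
	connString := fmt.Sprintf(
		"postgres://%s:%s@%s:%s/%s?load_balance=true&topology_keys=%s&yb_servers_refresh_interval=30",
		"yb_user", "yb_pass", "127.0.0.1", "5433", "yugabyte", "cloud1.region1.zone1",
	)

	pool, err := pgxpool.New(ctx, connString)
	if err != nil {
		panic(fmt.Errorf("unable to create connection pool: %w", err))
	}
	defer pool.Close()

	if err := pool.Ping(ctx); err != nil {
		panic(fmt.Errorf("unable to connect successfully: %w", err))
	}
	fmt.Println("connected")
}
```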
@@ -1,299 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package yugabytedb_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"strings"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
)
|
||||
|
||||
// Basic config parse
|
||||
func TestParseFromYamlYugabyteDB(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.SourceConfigs
|
||||
}{
|
||||
{
|
||||
desc: "only required fields",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with loadBalance only",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
loadBalance: true
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
LoadBalance: "true",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "loadBalance with topologyKeys",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
loadBalance: true
|
||||
topologyKeys: zone1,zone2
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
LoadBalance: "true",
|
||||
TopologyKeys: "zone1,zone2",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with fallback only",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
loadBalance: true
|
||||
topologyKeys: zone1
|
||||
fallbackToTopologyKeysOnly: true
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
LoadBalance: "true",
|
||||
TopologyKeys: "zone1",
|
||||
FallBackToTopologyKeysOnly: "true",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with refresh interval and reconnect delay",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
loadBalance: true
|
||||
ybServersRefreshInterval: 20
|
||||
failedHostReconnectDelaySecs: 5
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
LoadBalance: "true",
|
||||
YBServersRefreshInterval: "20",
|
||||
FailedHostReconnectDelaySeconds: "5",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "all fields set",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-instance:
|
||||
kind: yugabytedb
|
||||
name: my-yb-instance
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
database: yb_db
|
||||
loadBalance: true
|
||||
topologyKeys: zone1,zone2
|
||||
fallbackToTopologyKeysOnly: true
|
||||
ybServersRefreshInterval: 30
|
||||
failedHostReconnectDelaySecs: 10
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-yb-instance": yugabytedb.Config{
|
||||
Name: "my-yb-instance",
|
||||
Kind: "yugabytedb",
|
||||
Host: "yb-host",
|
||||
Port: "yb-port",
|
||||
User: "yb_user",
|
||||
Password: "yb_pass",
|
||||
Database: "yb_db",
|
||||
LoadBalance: "true",
|
||||
TopologyKeys: "zone1,zone2",
|
||||
FallBackToTopologyKeysOnly: "true",
|
||||
YBServersRefreshInterval: "30",
|
||||
FailedHostReconnectDelaySeconds: "10",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if !cmp.Equal(tc.want, got.Sources) {
|
||||
t.Fatalf("incorrect parse (-want +got):\n%s", cmp.Diff(tc.want, got.Sources))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailParseFromYamlYugabyteDB(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
desc: "extra field",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-source:
|
||||
kind: yugabytedb
|
||||
name: my-yb-source
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
database: yb_db
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
foo: bar
|
||||
`,
|
||||
err: "unable to parse source \"my-yb-source\" as \"yugabytedb\": [2:1] unknown field \"foo\"",
|
||||
},
|
||||
{
|
||||
desc: "missing required field (password)",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-source:
|
||||
kind: yugabytedb
|
||||
name: my-yb-source
|
||||
host: yb-host
|
||||
port: yb-port
|
||||
database: yb_db
|
||||
user: yb_user
|
||||
`,
|
||||
err: "unable to parse source \"my-yb-source\" as \"yugabytedb\": Key: 'Config.Password' Error:Field validation for 'Password' failed on the 'required' tag",
|
||||
},
|
||||
{
|
||||
desc: "missing required field (host)",
|
||||
in: `
|
||||
sources:
|
||||
my-yb-source:
|
||||
kind: yugabytedb
|
||||
name: my-yb-source
|
||||
port: yb-port
|
||||
database: yb_db
|
||||
user: yb_user
|
||||
password: yb_pass
|
||||
`,
|
||||
err: "unable to parse source \"my-yb-source\" as \"yugabytedb\": Key: 'Config.Host' Error:Field validation for 'Host' failed on the 'required' tag",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err == nil {
|
||||
t.Fatalf("expected parsing to fail")
|
||||
}
|
||||
errStr := err.Error()
|
||||
if !strings.Contains(errStr, tc.err) {
|
||||
t.Fatalf("unexpected error:\nGot: %q\nWant: %q", errStr, tc.err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -55,10 +55,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryTokenSource() oauth2.TokenSource
|
||||
BigQueryProject() string
|
||||
BigQueryLocation() string
|
||||
GetMaxQueryResultRows() int
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
type BQTableReference struct {
|
||||
@@ -148,12 +145,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Project: s.BigQueryProject(),
|
||||
Location: s.BigQueryLocation(),
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
TokenSource: s.BigQueryTokenSource(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
@@ -166,14 +160,10 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Project string
|
||||
Location string
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Client *bigqueryapi.Client
|
||||
TokenSource oauth2.TokenSource
|
||||
manifest tools.Manifest
|
||||
@@ -182,29 +172,14 @@ type Tool struct {
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
var tokenStr string
|
||||
var err error
|
||||
|
||||
// Get credentials for the API call
|
||||
if t.UseClientOAuth {
|
||||
// Use client-side access token
|
||||
if accessToken == "" {
|
||||
return nil, fmt.Errorf("tool is configured for client OAuth but no token was provided in the request header: %w", tools.ErrUnauthorized)
|
||||
}
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
} else {
|
||||
// Use ADC
|
||||
if t.TokenSource == nil {
|
||||
return nil, fmt.Errorf("ADC is missing a valid token source")
|
||||
}
|
||||
token, err := t.TokenSource.Token()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get token from ADC: %w", err)
|
||||
}
|
||||
tokenStr = token.AccessToken
|
||||
if t.TokenSource == nil {
|
||||
return nil, fmt.Errorf("authentication error: found credentials but they are missing a valid token source")
|
||||
}
|
||||
|
||||
token, err := t.TokenSource.Token()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get token from credentials: %w", err)
|
||||
}
|
||||
|
||||
// Extract parameters from the map
|
||||
@@ -222,15 +197,15 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
}
|
||||
|
||||
// Construct URL, headers, and payload
|
||||
projectID := t.Project
|
||||
location := t.Location
|
||||
projectID := t.Client.Project()
|
||||
location := t.Client.Location
|
||||
if location == "" {
|
||||
location = "us"
|
||||
}
|
||||
caURL := fmt.Sprintf("https://geminidataanalytics.googleapis.com/v1alpha/projects/%s/locations/%s:chat", projectID, location)
|
||||
|
||||
headers := map[string]string{
|
||||
"Authorization": fmt.Sprintf("Bearer %s", tokenStr),
|
||||
"Authorization": fmt.Sprintf("Bearer %s", token.AccessToken),
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
@@ -271,7 +246,7 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
// StreamMessage represents a single message object from the streaming API response.
|
||||
|
||||
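After this change the ask_data_insights tool relies solely on Application Default Credentials rather than a client-supplied OAuth token. A hedged sketch of the remaining flow, building the chat request from an `oauth2.TokenSource`; the endpoint string is taken from the hunk above, while the helper name and request-body handling are assumptions:

```go
package example

import (
	"context"
	"fmt"
	"net/http"
	"strings"

	"golang.org/x/oauth2"
)

// newChatRequest obtains an access token from ADC-style credentials and
// attaches it to a request against the geminidataanalytics chat endpoint.
func newChatRequest(ctx context.Context, ts oauth2.TokenSource, project, location, payload string) (*http.Request, error) {
	if ts == nil {
		return nil, fmt.Errorf("authentication error: found credentials but they are missing a valid token source")
	}
	token, err := ts.Token()
	if err != nil {
		return nil, fmt.Errorf("failed to get token from credentials: %w", err)
	}
	if location == "" {
		location = "us"
	}
	url := fmt.Sprintf("https://geminidataanalytics.googleapis.com/v1alpha/projects/%s/locations/%s:chat", project, location)
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, strings.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
	req.Header.Set("Content-Type", "application/json")
	return req, nil
}
```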
@@ -48,8 +48,6 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryRestService() *bigqueryrestapi.Service
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -102,16 +100,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -120,17 +116,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -144,23 +137,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
return nil, fmt.Errorf("unable to cast dry_run parameter %s", paramsMap["dry_run"])
|
||||
}
|
||||
|
||||
bqClient := t.Client
|
||||
restService := t.RestService
|
||||
|
||||
var err error
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, restService, err = t.ClientCreator(tokenStr, true)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
dryRunJob, err := dryRunQuery(ctx, restService, bqClient.Project(), bqClient.Location, sql)
|
||||
dryRunJob, err := dryRunQuery(ctx, t.RestService, t.Client.Project(), t.Client.Location, sql)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("query validation failed during dry run: %w", err)
|
||||
}
|
||||
@@ -179,8 +156,8 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
|
||||
statementType := dryRunJob.Statistics.Query.StatementType
|
||||
// JobStatistics.QueryStatistics.StatementType
|
||||
query := bqClient.Query(sql)
|
||||
query.Location = bqClient.Location
|
||||
query := t.Client.Query(sql)
|
||||
query.Location = t.Client.Location
|
||||
|
||||
// Log the query executed for debugging.
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
@@ -246,7 +223,7 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
// dryRunQuery performs a dry run of the SQL query to validate it and get metadata.
|
||||
|
||||
@@ -48,8 +48,6 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryRestService() *bigqueryrestapi.Service
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -106,16 +104,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -124,17 +120,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -194,24 +187,9 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
horizon => %d%s)`,
|
||||
historyDataSource, dataCol, timestampCol, horizon, idColsArg)
|
||||
|
||||
bqClient := t.Client
|
||||
var err error
|
||||
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, _, err = t.ClientCreator(tokenStr, false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// JobStatistics.QueryStatistics.StatementType
|
||||
query := bqClient.Query(sql)
|
||||
query.Location = bqClient.Location
|
||||
query := t.Client.Query(sql)
|
||||
query.Location = t.Client.Location
|
||||
|
||||
// Log the query executed for debugging.
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
@@ -269,5 +247,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -44,10 +44,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryProject() string
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -83,7 +80,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project ID containing the dataset.")
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to get metadata information.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter}
|
||||
|
||||
@@ -95,15 +92,13 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -112,17 +107,15 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Client *bigqueryapi.Client
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -137,25 +130,11 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
|
||||
}
|
||||
|
||||
bqClient := t.Client
|
||||
var err error
|
||||
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, _, err = t.ClientCreator(tokenStr, false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
dsHandle := bqClient.DatasetInProject(projectId, datasetId)
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
|
||||
metadata, err := dsHandle.Metadata(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get metadata for dataset %s (in project %s): %w", datasetId, bqClient.Project(), err)
|
||||
return nil, fmt.Errorf("failed to get metadata for dataset %s (in project %s): %w", datasetId, t.Client.Project(), err)
|
||||
}
|
||||
|
||||
return metadata, nil
|
||||
@@ -178,5 +157,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -45,10 +45,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryProject() string
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -84,7 +81,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project ID containing the dataset and table.")
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset and table.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The table's parent dataset.")
|
||||
tableParameter := tools.NewStringParameter(tableKey, "The table to get metadata information.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter, tableParameter}
|
||||
@@ -97,15 +94,13 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -114,17 +109,15 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Client *bigqueryapi.Client
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -144,22 +137,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", tableKey)
|
||||
}
|
||||
|
||||
bqClient := t.Client
|
||||
|
||||
var err error
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, _, err = t.ClientCreator(tokenStr, false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
dsHandle := bqClient.DatasetInProject(projectId, datasetId)
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
tableHandle := dsHandle.Table(tableId)
|
||||
|
||||
metadata, err := tableHandle.Metadata(ctx)
|
||||
@@ -187,5 +165,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -44,10 +44,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryProject() string
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -83,7 +80,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project to list dataset ids.")
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project to list dataset ids.")
|
||||
|
||||
parameters := tools.Parameters{projectParameter}
|
||||
|
||||
@@ -95,15 +92,13 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -112,17 +107,15 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Client *bigqueryapi.Client
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -131,20 +124,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
|
||||
}
|
||||
|
||||
bqClient := t.Client
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, _, err = t.ClientCreator(tokenStr, false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
datasetIterator := bqClient.Datasets(ctx)
|
||||
datasetIterator := t.Client.Datasets(ctx)
|
||||
datasetIterator.ProjectID = projectId
|
||||
|
||||
var datasetIds []any
|
||||
@@ -185,5 +165,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -46,9 +46,6 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
|
||||
BigQueryProject() string
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -84,7 +81,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project ID containing the dataset.")
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to list table ids.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter}
|
||||
|
||||
@@ -96,15 +93,13 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
UseClientOAuth: s.UseClientAuthorization(),
|
||||
ClientCreator: s.BigQueryClientCreator(),
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
@@ -113,17 +108,15 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
UseClientOAuth bool `yaml:"useClientOAuth"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *bigqueryapi.Client
|
||||
ClientCreator bigqueryds.BigqueryClientCreator
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
Client *bigqueryapi.Client
|
||||
Statement string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
@@ -138,20 +131,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
|
||||
}
|
||||
|
||||
bqClient := t.Client
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, _, err = t.ClientCreator(tokenStr, false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
dsHandle := bqClient.DatasetInProject(projectId, datasetId)
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
|
||||
var tableIds []any
|
||||
tableIterator := dsHandle.Tables(ctx)
|
||||
@@ -161,7 +141,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to iterate through tables in dataset %s.%s: %w", bqClient.Project(), datasetId, err)
|
||||
return nil, fmt.Errorf("failed to iterate through tables in dataset %s.%s: %w", t.Client.Project(), datasetId, err)
|
||||
}
|
||||
|
||||
// Remove leading and trailing quotes
|
||||
@@ -192,5 +172,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return t.UseClientOAuth
|
||||
return false
|
||||
}
|
||||
|
||||
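The hunks above strip the per-request OAuth path from the BigQuery list-table-ids tool: the `UseClientOAuth` flag, the `ClientCreator` field, and the branch that rebuilt the client from the caller's bearer token all go away, and the tool falls back to the source-level client. For reference, the pattern being dropped can be sketched roughly as below; the helper name and wiring are illustrative assumptions, not the toolbox's actual `BigqueryClientCreator`.

```go
// A minimal sketch, assuming a hypothetical helper name: build a BigQuery
// client from a caller-supplied OAuth access token instead of the
// source-level credentials.
package main

import (
	"context"
	"fmt"

	bigqueryapi "cloud.google.com/go/bigquery"
	"golang.org/x/oauth2"
	"google.golang.org/api/option"
)

// newClientFromToken is a hypothetical stand-in for the removed ClientCreator.
func newClientFromToken(ctx context.Context, project, accessToken string) (*bigqueryapi.Client, error) {
	ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: accessToken})
	client, err := bigqueryapi.NewClient(ctx, project, option.WithTokenSource(ts))
	if err != nil {
		return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
	}
	return client, nil
}

func main() {
	client, err := newClientFromToken(context.Background(), "my-project", "ya29.placeholder-token")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	defer client.Close()
	fmt.Println("client ready for project:", client.Project())
}
```

A static token source ties the client's lifetime to the caller's token, which is why such clients are built per request rather than cached on the tool.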
@@ -219,20 +219,17 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
|
||||
|
||||
bqClient := t.Client
|
||||
restService := t.RestService
|
||||
var query *bigqueryapi.Query
|
||||
|
||||
// Initialize new client if using user OAuth token
|
||||
if t.UseClientOAuth {
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
bqClient, restService, err = t.ClientCreator(tokenStr, true)
|
||||
bqClient, restService, err = t.ClientCreator(accessToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
query := bqClient.Query(newStatement)
|
||||
query = bqClient.Query(newStatement)
|
||||
query.Parameters = highLevelParams
|
||||
query.Location = bqClient.Location
|
||||
|
||||
|
||||
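This hunk hoists the `query` declaration above the OAuth branch and switches the later construction from `:=` to plain assignment (the creator call also drops its extra arguments). A generic Go note, not toolbox code: once `query` is declared with `var`, a second `:=` at the same scope would not compile, and a `:=` inside the branch would only create a shadowed copy.

```go
// Generic illustration of declare-then-assign, not toolbox code.
package main

import "fmt"

func main() {
	var query string // declared up front, as the hunk does for *bigqueryapi.Query

	useAlt := true
	if useAlt {
		// query := "shadowed" // would create a new inner variable, leaving the outer one empty
		query = "built inside the branch"
	}
	query = query + " (assigned, not redeclared)"
	fmt.Println(query)
}
```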
@@ -1,440 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cloudsqlwaitforoperation
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"golang.org/x/oauth2/google"
|
||||
)
|
||||
|
||||
const kind string = "cloud-sql-wait-for-operation"
|
||||
|
||||
var cloudSQLConnectionMessageTemplate = `Your Cloud SQL resource is ready.
|
||||
|
||||
To connect, please configure your environment. The method depends on how you are running the toolbox:
|
||||
|
||||
**If running locally via stdio:**
|
||||
Update the MCP server configuration with the following environment variables:
|
||||
` + "```json" + `
|
||||
{
|
||||
"mcpServers": {
|
||||
"cloud-sql-{{.DBType}}": {
|
||||
"command": "./PATH/TO/toolbox",
|
||||
"args": ["--prebuilt","cloud-sql-{{.DBType}}","--stdio"],
|
||||
"env": {
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_PROJECT": "{{.Project}}",
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_REGION": "{{.Region}}",
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_INSTANCE": "{{.Instance}}",
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_DATABASE": "{{.Database}}",
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_USER": "<your-user>",
|
||||
"CLOUD_SQL_{{.DBTypeUpper}}_PASSWORD": "<your-password>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
` + "```" + `
|
||||
|
||||
**If running remotely:**
|
||||
For remote deployments, you will need to set the following environment variables in your deployment configuration:
|
||||
` + "```" + `
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_PROJECT={{.Project}}
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_REGION={{.Region}}
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_INSTANCE={{.Instance}}
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_DATABASE={{.Database}}
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_USER=<your-user>
|
||||
CLOUD_SQL_{{.DBTypeUpper}}_PASSWORD=<your-password>
|
||||
` + "```" + `
|
||||
|
||||
Please refer to the official documentation for guidance on deploying the toolbox:
|
||||
- Deploying the Toolbox: https://googleapis.github.io/genai-toolbox/how-to/deploy_toolbox/
|
||||
- Deploying on GKE: https://googleapis.github.io/genai-toolbox/how-to/deploy_gke/
|
||||
`
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
// Config defines the configuration for the wait-for-operation tool.
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
BaseURL string `yaml:"baseURL"`
|
||||
|
||||
// Polling configuration
|
||||
Delay string `yaml:"delay"`
|
||||
MaxDelay string `yaml:"maxDelay"`
|
||||
Multiplier float64 `yaml:"multiplier"`
|
||||
MaxRetries int `yaml:"maxRetries"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
// ToolConfigKind returns the kind of the tool.
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
// Initialize initializes the tool from the configuration.
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
s, ok := rawS.(*httpsrc.Source)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `http`", kind)
|
||||
}
|
||||
|
||||
if s.BaseURL != "https://sqladmin.googleapis.com" && !strings.HasPrefix(s.BaseURL, "http://127.0.0.1") {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: baseUrl must be `https://sqladmin.googleapis.com`", kind)
|
||||
}
|
||||
|
||||
allParameters := tools.Parameters{
|
||||
tools.NewStringParameter("project", "The project ID"),
|
||||
tools.NewStringParameter("operation", "The operation ID"),
|
||||
}
|
||||
paramManifest := allParameters.Manifest()
|
||||
|
||||
inputSchema := allParameters.McpManifest()
|
||||
inputSchema.Required = []string{"project", "operation"}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: inputSchema,
|
||||
}
|
||||
|
||||
baseURL := cfg.BaseURL
|
||||
if baseURL == "" {
|
||||
baseURL = "https://sqladmin.googleapis.com"
|
||||
}
|
||||
|
||||
var delay time.Duration
|
||||
if cfg.Delay == "" {
|
||||
delay = 3 * time.Second
|
||||
} else {
|
||||
var err error
|
||||
delay, err = time.ParseDuration(cfg.Delay)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid value for delay: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
var maxDelay time.Duration
|
||||
if cfg.MaxDelay == "" {
|
||||
maxDelay = 4 * time.Minute
|
||||
} else {
|
||||
var err error
|
||||
maxDelay, err = time.ParseDuration(cfg.MaxDelay)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid value for maxDelay: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
multiplier := cfg.Multiplier
|
||||
if multiplier == 0 {
|
||||
multiplier = 2.0
|
||||
}
|
||||
|
||||
maxRetries := cfg.MaxRetries
|
||||
if maxRetries == 0 {
|
||||
maxRetries = 10
|
||||
}
|
||||
|
||||
return Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
BaseURL: baseURL,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.Client,
|
||||
AllParams: allParameters,
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
Delay: delay,
|
||||
MaxDelay: maxDelay,
|
||||
Multiplier: multiplier,
|
||||
MaxRetries: maxRetries,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Tool represents the wait-for-operation tool.
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Description string `yaml:"description"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
|
||||
BaseURL string `yaml:"baseURL"`
|
||||
AllParams tools.Parameters `yaml:"allParams"`
|
||||
|
||||
// Polling configuration
|
||||
Delay time.Duration
|
||||
MaxDelay time.Duration
|
||||
Multiplier float64
|
||||
MaxRetries int
|
||||
|
||||
Client *http.Client
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
// Invoke executes the tool's logic.
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
paramsMap := params.AsMap()
|
||||
|
||||
project, ok := paramsMap["project"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("missing 'project' parameter")
|
||||
}
|
||||
operationID, ok := paramsMap["operation"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("missing 'operation' parameter")
|
||||
}
|
||||
|
||||
urlString := fmt.Sprintf("%s/v1/projects/%s/operations/%s", t.BaseURL, project, operationID)
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, 30*time.Minute)
|
||||
defer cancel()
|
||||
|
||||
delay := t.Delay
|
||||
maxDelay := t.MaxDelay
|
||||
multiplier := t.Multiplier
|
||||
maxRetries := t.MaxRetries
|
||||
retries := 0
|
||||
|
||||
for retries < maxRetries {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, fmt.Errorf("timed out waiting for operation: %w", ctx.Err())
|
||||
default:
|
||||
}
|
||||
|
||||
req, _ := http.NewRequest(http.MethodGet, urlString, nil)
|
||||
|
||||
tokenSource, err := google.DefaultTokenSource(ctx, "https://www.googleapis.com/auth/sqlservice.admin")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating token source: %w", err)
|
||||
}
|
||||
token, err := tokenSource.Token()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error retrieving token: %w", err)
|
||||
}
|
||||
req.Header.Set("Authorization", "Bearer "+token.AccessToken)
|
||||
|
||||
resp, err := t.Client.Do(req)
|
||||
if err != nil {
|
||||
fmt.Printf("error making HTTP request during polling: %s, retrying in %v\n", err, delay)
|
||||
} else {
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
resp.Body.Close()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error reading response body during polling: %w", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("unexpected status code during polling: %d, response body: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var data map[string]any
|
||||
if err := json.Unmarshal(body, &data); err == nil {
|
||||
if val, ok := data["status"]; ok {
|
||||
if fmt.Sprintf("%v", val) == "DONE" {
|
||||
if _, ok := data["error"]; ok {
|
||||
return nil, fmt.Errorf("operation finished with error: %s", string(body))
|
||||
}
|
||||
|
||||
if msg, ok := t.generateCloudSQLConnectionMessage(data); ok {
|
||||
return msg, nil
|
||||
}
|
||||
return string(body), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
fmt.Printf("Operation not complete, retrying in %v\n", delay)
|
||||
}
|
||||
|
||||
time.Sleep(delay)
|
||||
delay = time.Duration(float64(delay) * multiplier)
|
||||
if delay > maxDelay {
|
||||
delay = maxDelay
|
||||
}
|
||||
retries++
|
||||
}
|
||||
return nil, fmt.Errorf("exceeded max retries waiting for operation")
|
||||
}
|
||||
|
||||
// ParseParams parses the parameters for the tool.
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
// McpManifest returns the tool's MCP manifest.
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
// Authorized checks if the tool is authorized.
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (t Tool) generateCloudSQLConnectionMessage(opResponse map[string]any) (string, bool) {
|
||||
operationType, ok := opResponse["operationType"].(string)
|
||||
if !ok || operationType != "CREATE_DATABASE" {
|
||||
return "", false
|
||||
}
|
||||
|
||||
targetLink, ok := opResponse["targetLink"].(string)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
r := regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
|
||||
matches := r.FindStringSubmatch(targetLink)
|
||||
if len(matches) < 4 {
|
||||
return "", false
|
||||
}
|
||||
project := matches[1]
|
||||
instance := matches[2]
|
||||
database := matches[3]
|
||||
|
||||
instanceData, err := t.fetchInstanceData(context.Background(), project, instance)
|
||||
if err != nil {
|
||||
fmt.Printf("error fetching instance data: %v\n", err)
|
||||
return "", false
|
||||
}
|
||||
|
||||
region, ok := instanceData["region"].(string)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
databaseVersion, ok := instanceData["databaseVersion"].(string)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
var dbType string
|
||||
if strings.Contains(databaseVersion, "POSTGRES") {
|
||||
dbType = "postgres"
|
||||
} else if strings.Contains(databaseVersion, "MYSQL") {
|
||||
dbType = "mysql"
|
||||
} else if strings.Contains(databaseVersion, "SQLSERVER") {
|
||||
dbType = "mssql"
|
||||
} else {
|
||||
return "", false
|
||||
}
|
||||
|
||||
tmpl, err := template.New("cloud-sql-connection").Parse(cloudSQLConnectionMessageTemplate)
|
||||
if err != nil {
|
||||
return fmt.Sprintf("template parsing error: %v", err), false
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Project string
|
||||
Region string
|
||||
Instance string
|
||||
DBType string
|
||||
DBTypeUpper string
|
||||
Database string
|
||||
}{
|
||||
Project: project,
|
||||
Region: region,
|
||||
Instance: instance,
|
||||
DBType: dbType,
|
||||
DBTypeUpper: strings.ToUpper(dbType),
|
||||
Database: database,
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
if err := tmpl.Execute(&b, data); err != nil {
|
||||
return fmt.Sprintf("template execution error: %v", err), false
|
||||
}
|
||||
|
||||
return b.String(), true
|
||||
}
|
||||
|
||||
func (t Tool) fetchInstanceData(ctx context.Context, project, instance string) (map[string]any, error) {
|
||||
urlString := fmt.Sprintf("%s/v1/projects/%s/instances/%s", t.BaseURL, project, instance)
|
||||
req, _ := http.NewRequest(http.MethodGet, urlString, nil)
|
||||
|
||||
tokenSource, err := google.DefaultTokenSource(ctx, "https://www.googleapis.com/auth/sqlservice.admin")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating token source: %w", err)
|
||||
}
|
||||
token, err := tokenSource.Token()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error retrieving token: %w", err)
|
||||
}
|
||||
req.Header.Set("Authorization", "Bearer "+token.AccessToken)
|
||||
|
||||
resp, err := t.Client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("unexpected status code fetching instance data: %d, response body: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
var data map[string]any
|
||||
if err := json.Unmarshal(body, &data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
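The deleted `cloud-sql-wait-for-operation` tool above polls the operation with capped exponential backoff: wait `delay`, multiply by `multiplier` after each attempt, never exceed `maxDelay`, and give up after `maxRetries`. A standalone sketch of that schedule, with a stand-in poll function instead of the real `operations.get` call:

```go
// Sketch of the polling schedule only; the poll callback here is a stand-in
// for the HTTP call to the Cloud SQL Admin API.
package main

import (
	"context"
	"fmt"
	"time"
)

func waitForDone(ctx context.Context, poll func(context.Context) (bool, error),
	delay, maxDelay time.Duration, multiplier float64, maxRetries int) error {
	for retries := 0; retries < maxRetries; retries++ {
		select {
		case <-ctx.Done():
			return fmt.Errorf("timed out waiting for operation: %w", ctx.Err())
		default:
		}

		done, err := poll(ctx)
		if err == nil && done {
			return nil
		}

		// Back off: grow the delay geometrically, but never past maxDelay.
		time.Sleep(delay)
		delay = time.Duration(float64(delay) * multiplier)
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	return fmt.Errorf("exceeded max retries waiting for operation")
}

func main() {
	attempts := 0
	err := waitForDone(context.Background(), func(context.Context) (bool, error) {
		attempts++
		return attempts >= 3, nil // pretend the operation finishes on the third poll
	}, 100*time.Millisecond, time.Second, 2.0, 10)
	fmt.Println("result:", err)
}
```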
@@ -1,80 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cloudsqlwaitforoperation_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
cloudsqlwaitforoperation "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
|
||||
)
|
||||
|
||||
func TestParseFromYaml(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
wait-for-thing:
|
||||
kind: cloud-sql-wait-for-operation
|
||||
source: some-source
|
||||
description: some description
|
||||
delay: 1s
|
||||
maxDelay: 5s
|
||||
multiplier: 1.5
|
||||
maxRetries: 5
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"wait-for-thing": cloudsqlwaitforoperation.Config{
|
||||
Name: "wait-for-thing",
|
||||
Kind: "cloud-sql-wait-for-operation",
|
||||
Source: "some-source",
|
||||
Description: "some description",
|
||||
AuthRequired: []string{},
|
||||
Delay: "1s",
|
||||
MaxDelay: "5s",
|
||||
Multiplier: 1.5,
|
||||
MaxRetries: 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
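The deleted tool (above its test file here) builds its connection message by extracting the project, instance, and database from the operation's `targetLink` with a regular expression. A self-contained version of that extraction, using a made-up URL for illustration:

```go
// Standalone version of the targetLink parsing in generateCloudSQLConnectionMessage.
// The sample URL is fabricated for illustration.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	targetLink := "https://sqladmin.googleapis.com/v1/projects/my-proj/instances/my-instance/databases/my-db"
	r := regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
	m := r.FindStringSubmatch(targetLink)
	if len(m) < 4 {
		fmt.Println("no match")
		return
	}
	fmt.Println("project:", m[1], "instance:", m[2], "database:", m[3])
}
```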
@@ -87,30 +87,18 @@ func convertParamToJSON(param any) (string, error) {

// PopulateTemplateWithJSON populates a Go template with a custom `json` array formatter
func PopulateTemplateWithJSON(templateName, templateString string, data map[string]any) (string, error) {
return PopulateTemplateWithFunc(templateName, templateString, data, template.FuncMap{
funcMap := template.FuncMap{
"json": convertParamToJSON,
})
}

// PopulateTemplate populates a Go template with no custom formatters
func PopulateTemplate(templateName, templateString string, data map[string]any) (string, error) {
return PopulateTemplateWithFunc(templateName, templateString, data, nil)
}

// PopulateTemplateWithFunc populates a Go template with provided functions
func PopulateTemplateWithFunc(templateName, templateString string, data map[string]any, funcMap template.FuncMap) (string, error) {
tmpl := template.New(templateName)
if funcMap != nil {
tmpl = tmpl.Funcs(funcMap)
}

parsedTmpl, err := tmpl.Parse(templateString)
tmpl, err := template.New(templateName).Funcs(funcMap).Parse(templateString)
if err != nil {
return "", fmt.Errorf("error parsing template '%s': %w", templateName, err)
}

var result bytes.Buffer
if err := parsedTmpl.Execute(&result, data); err != nil {
err = tmpl.Execute(&result, data)
if err != nil {
return "", fmt.Errorf("error executing template '%s': %w", templateName, err)
}
return result.String(), nil
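This hunk folds the template helpers back into a single function; the core idea on both sides is rendering a Go `text/template` with a custom `json` function so parameter values can be embedded as JSON literals. A minimal standalone version of that pattern (it mirrors the idea, not the exact toolbox helpers):

```go
// Minimal sketch: a text/template FuncMap with a "json" helper that marshals
// the given value, so structured parameters can be dropped into a template.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"text/template"
)

func main() {
	funcMap := template.FuncMap{
		"json": func(v any) (string, error) {
			b, err := json.Marshal(v)
			if err != nil {
				return "", err
			}
			return string(b), nil
		},
	}

	tmpl, err := template.New("filters").Funcs(funcMap).Parse(`{"field": "ids", "op": "in", "value": {{json .ids}}}`)
	if err != nil {
		panic(err)
	}

	var out bytes.Buffer
	if err := tmpl.Execute(&out, map[string]any{"ids": []int{1, 2, 3}}); err != nil {
		panic(err)
	}
	fmt.Println(out.String()) // {"field": "ids", "op": "in", "value": [1,2,3]}
}
```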
@@ -1,297 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package tools_test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
"text/template"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
func TestPopulateTemplate(t *testing.T) {
|
||||
tcs := []struct {
|
||||
name string
|
||||
templateName string
|
||||
templateString string
|
||||
data map[string]any
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "simple string substitution",
|
||||
templateName: "test",
|
||||
templateString: "Hello {{.name}}!",
|
||||
data: map[string]any{"name": "World"},
|
||||
want: "Hello World!",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "multiple substitutions",
|
||||
templateName: "test",
|
||||
templateString: "{{.greeting}} {{.name}}, you are {{.age}} years old",
|
||||
data: map[string]any{"greeting": "Hello", "name": "Alice", "age": 30},
|
||||
want: "Hello Alice, you are 30 years old",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "empty template",
|
||||
templateName: "test",
|
||||
templateString: "",
|
||||
data: map[string]any{},
|
||||
want: "",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "no substitutions",
|
||||
templateName: "test",
|
||||
templateString: "Plain text without templates",
|
||||
data: map[string]any{},
|
||||
want: "Plain text without templates",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "invalid template syntax",
|
||||
templateName: "test",
|
||||
templateString: "{{.name",
|
||||
data: map[string]any{"name": "World"},
|
||||
want: "",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "missing field",
|
||||
templateName: "test",
|
||||
templateString: "{{.missing}}",
|
||||
data: map[string]any{"name": "World"},
|
||||
want: "<no value>",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "invalid function call",
|
||||
templateName: "test",
|
||||
templateString: "{{.name.invalid}}",
|
||||
data: map[string]any{"name": "World"},
|
||||
want: "",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got, err := tools.PopulateTemplate(tc.templateName, tc.templateString, tc.data)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil")
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Fatalf("incorrect result (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPopulateTemplateWithFunc(t *testing.T) {
|
||||
// Custom function for testing
|
||||
customFuncs := template.FuncMap{
|
||||
"upper": strings.ToUpper,
|
||||
"add": func(a, b int) int {
|
||||
return a + b
|
||||
},
|
||||
}
|
||||
|
||||
tcs := []struct {
|
||||
name string
|
||||
templateName string
|
||||
templateString string
|
||||
data map[string]any
|
||||
funcMap template.FuncMap
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "with custom upper function",
|
||||
templateName: "test",
|
||||
templateString: "{{upper .text}}",
|
||||
data: map[string]any{"text": "hello"},
|
||||
funcMap: customFuncs,
|
||||
want: "HELLO",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with custom add function",
|
||||
templateName: "test",
|
||||
templateString: "Result: {{add .x .y}}",
|
||||
data: map[string]any{"x": 5, "y": 3},
|
||||
funcMap: customFuncs,
|
||||
want: "Result: 8",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "nil funcMap",
|
||||
templateName: "test",
|
||||
templateString: "Hello {{.name}}",
|
||||
data: map[string]any{"name": "World"},
|
||||
funcMap: nil,
|
||||
want: "Hello World",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "combine custom function with regular substitution",
|
||||
templateName: "test",
|
||||
templateString: "{{upper .greeting}} {{.name}}!",
|
||||
data: map[string]any{"greeting": "hello", "name": "Alice"},
|
||||
funcMap: customFuncs,
|
||||
want: "HELLO Alice!",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "undefined function",
|
||||
templateName: "test",
|
||||
templateString: "{{undefined .text}}",
|
||||
data: map[string]any{"text": "hello"},
|
||||
funcMap: nil,
|
||||
want: "",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "wrong number of arguments",
|
||||
templateName: "test",
|
||||
templateString: "{{upper}}",
|
||||
data: map[string]any{},
|
||||
funcMap: template.FuncMap{"upper": strings.ToUpper},
|
||||
want: "",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got, err := tools.PopulateTemplateWithFunc(tc.templateName, tc.templateString, tc.data, tc.funcMap)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil")
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Fatalf("incorrect result (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPopulateTemplateWithJSON(t *testing.T) {
|
||||
tcs := []struct {
|
||||
name string
|
||||
templateName string
|
||||
templateString string
|
||||
data map[string]any
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "json string",
|
||||
templateName: "test",
|
||||
templateString: "Data: {{json .value}}",
|
||||
data: map[string]any{"value": "hello"},
|
||||
want: `Data: "hello"`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "json number",
|
||||
templateName: "test",
|
||||
templateString: "Number: {{json .num}}",
|
||||
data: map[string]any{"num": 42},
|
||||
want: "Number: 42",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "json boolean",
|
||||
templateName: "test",
|
||||
templateString: "Bool: {{json .flag}}",
|
||||
data: map[string]any{"flag": true},
|
||||
want: "Bool: true",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "json array",
|
||||
templateName: "test",
|
||||
templateString: "Array: {{json .items}}",
|
||||
data: map[string]any{"items": []any{"a", "b", "c"}},
|
||||
want: `Array: ["a","b","c"]`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "json object",
|
||||
templateName: "test",
|
||||
templateString: "Object: {{json .obj}}",
|
||||
data: map[string]any{"obj": map[string]any{"name": "Alice", "age": 30}},
|
||||
want: `Object: {"age":30,"name":"Alice"}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "json null",
|
||||
templateName: "test",
|
||||
templateString: "Null: {{json .nullValue}}",
|
||||
data: map[string]any{"nullValue": nil},
|
||||
want: "Null: null",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "combine json with regular substitution",
|
||||
templateName: "test",
|
||||
templateString: "User {{.name}} has data: {{json .data}}",
|
||||
data: map[string]any{"name": "Bob", "data": map[string]any{"id": 123}},
|
||||
want: `User Bob has data: {"id":123}`,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "missing field for json",
|
||||
templateName: "test",
|
||||
templateString: "{{json .missing}}",
|
||||
data: map[string]any{},
|
||||
want: "null",
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got, err := tools.PopulateTemplateWithJSON(tc.templateName, tc.templateString, tc.data)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil")
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got); diff != "" {
|
||||
t.Fatalf("incorrect result (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -131,7 +131,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
if err := util.ValidateDocumentPath(parentPath); err != nil {
return nil, fmt.Errorf("invalid parent document path: %w", err)
}


// List subcollections of the specified document
docRef := t.Client.Doc(parentPath)
collectionRefs, err = docRef.Collections(ctx).GetAll()
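The hunk above resolves the parent document and lists its subcollections via the Firestore Go client. A rough standalone sketch of the same call pattern, with placeholder project and document path:

```go
// Sketch of listing a document's subcollections; project ID and document
// path are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	"cloud.google.com/go/firestore"
)

func main() {
	ctx := context.Background()
	client, err := firestore.NewClient(ctx, "my-project") // placeholder project
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	docRef := client.Doc("users/alice") // placeholder parent document path
	collectionRefs, err := docRef.Collections(ctx).GetAll()
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range collectionRefs {
		fmt.Println(c.ID)
	}
}
```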
@@ -1,548 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorequery
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
firestoreapi "cloud.google.com/go/firestore"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/util"
|
||||
)
|
||||
|
||||
// Constants for tool configuration
|
||||
const (
|
||||
kind = "firestore-query"
|
||||
defaultLimit = 100
|
||||
)
|
||||
|
||||
// Firestore operators
|
||||
var validOperators = map[string]bool{
|
||||
"<": true,
|
||||
"<=": true,
|
||||
">": true,
|
||||
">=": true,
|
||||
"==": true,
|
||||
"!=": true,
|
||||
"array-contains": true,
|
||||
"array-contains-any": true,
|
||||
"in": true,
|
||||
"not-in": true,
|
||||
}
|
||||
|
||||
// Error messages
|
||||
const (
|
||||
errFilterParseFailed = "failed to parse filters: %w"
|
||||
errQueryExecutionFailed = "failed to execute query: %w"
|
||||
errTemplateParseFailed = "failed to parse template: %w"
|
||||
errTemplateExecFailed = "failed to execute template: %w"
|
||||
errLimitParseFailed = "failed to parse limit value '%s': %w"
|
||||
errSelectFieldParseFailed = "failed to parse select field: %w"
|
||||
)
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
// compatibleSource defines the interface for sources that can provide a Firestore client
|
||||
type compatibleSource interface {
|
||||
FirestoreClient() *firestoreapi.Client
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
// Config represents the configuration for the Firestore query tool
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
|
||||
// Template fields
|
||||
CollectionPath string `yaml:"collectionPath" validate:"required"`
|
||||
Filters string `yaml:"filters"` // JSON string template
|
||||
Select []string `yaml:"select"` // Fields to select
|
||||
OrderBy map[string]any `yaml:"orderBy"` // Order by configuration
|
||||
Limit string `yaml:"limit"` // Limit template (can be a number or template)
|
||||
AnalyzeQuery bool `yaml:"analyzeQuery"` // Analyze query (boolean, not parameterizable)
|
||||
|
||||
// Parameters for template substitution
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
// ToolConfigKind returns the kind of tool configuration
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
// Initialize creates a new Tool instance from the configuration
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
// Set default limit if not specified
|
||||
if cfg.Limit == "" {
|
||||
cfg.Limit = fmt.Sprintf("%d", defaultLimit)
|
||||
}
|
||||
|
||||
// Create MCP manifest
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: cfg.Parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.FirestoreClient(),
|
||||
CollectionPathTemplate: cfg.CollectionPath,
|
||||
FiltersTemplate: cfg.Filters,
|
||||
SelectTemplate: cfg.Select,
|
||||
OrderByTemplate: cfg.OrderBy,
|
||||
LimitTemplate: cfg.Limit,
|
||||
AnalyzeQuery: cfg.AnalyzeQuery,
|
||||
Parameters: cfg.Parameters,
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: cfg.Parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
// Tool represents the Firestore query tool
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
|
||||
Client *firestoreapi.Client
|
||||
CollectionPathTemplate string
|
||||
FiltersTemplate string
|
||||
SelectTemplate []string
|
||||
OrderByTemplate map[string]any
|
||||
LimitTemplate string
|
||||
AnalyzeQuery bool
|
||||
Parameters tools.Parameters
|
||||
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
// SimplifiedFilter represents the simplified filter format
|
||||
type SimplifiedFilter struct {
|
||||
And []SimplifiedFilter `json:"and,omitempty"`
|
||||
Or []SimplifiedFilter `json:"or,omitempty"`
|
||||
Field string `json:"field,omitempty"`
|
||||
Op string `json:"op,omitempty"`
|
||||
Value interface{} `json:"value,omitempty"`
|
||||
}
|
||||
|
||||
// OrderByConfig represents ordering configuration
|
||||
type OrderByConfig struct {
|
||||
Field string `json:"field"`
|
||||
Direction string `json:"direction"`
|
||||
}
|
||||
|
||||
// GetDirection returns the Firestore direction constant
|
||||
func (o *OrderByConfig) GetDirection() firestoreapi.Direction {
|
||||
if strings.EqualFold(o.Direction, "DESCENDING") || strings.EqualFold(o.Direction, "DESC") {
|
||||
return firestoreapi.Desc
|
||||
}
|
||||
return firestoreapi.Asc
|
||||
}
|
||||
|
||||
// QueryResult represents a document result from the query
|
||||
type QueryResult struct {
|
||||
ID string `json:"id"`
|
||||
Path string `json:"path"`
|
||||
Data map[string]any `json:"data"`
|
||||
CreateTime interface{} `json:"createTime,omitempty"`
|
||||
UpdateTime interface{} `json:"updateTime,omitempty"`
|
||||
ReadTime interface{} `json:"readTime,omitempty"`
|
||||
}
|
||||
|
||||
// QueryResponse represents the full response including optional metrics
|
||||
type QueryResponse struct {
|
||||
Documents []QueryResult `json:"documents"`
|
||||
ExplainMetrics map[string]any `json:"explainMetrics,omitempty"`
|
||||
}
|
||||
|
||||
// Invoke executes the Firestore query based on the provided parameters
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
paramsMap := params.AsMap()
|
||||
|
||||
// Process collection path with template substitution
|
||||
collectionPath, err := tools.PopulateTemplate("collectionPath", t.CollectionPathTemplate, paramsMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process collection path: %w", err)
|
||||
}
|
||||
|
||||
// Build the query
|
||||
query, err := t.buildQuery(collectionPath, paramsMap)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Execute the query and return results
|
||||
return t.executeQuery(ctx, query)
|
||||
}
|
||||
|
||||
// buildQuery constructs the Firestore query from parameters
|
||||
func (t Tool) buildQuery(collectionPath string, params map[string]any) (*firestoreapi.Query, error) {
|
||||
collection := t.Client.Collection(collectionPath)
|
||||
query := collection.Query
|
||||
|
||||
// Process and apply filters if template is provided
|
||||
if t.FiltersTemplate != "" {
|
||||
// Apply template substitution to filters
|
||||
filtersJSON, err := tools.PopulateTemplateWithJSON("filters", t.FiltersTemplate, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process filters template: %w", err)
|
||||
}
|
||||
|
||||
// Parse the simplified filter format
|
||||
var simplifiedFilter SimplifiedFilter
|
||||
if err := json.Unmarshal([]byte(filtersJSON), &simplifiedFilter); err != nil {
|
||||
return nil, fmt.Errorf(errFilterParseFailed, err)
|
||||
}
|
||||
|
||||
// Convert simplified filter to Firestore filter
|
||||
if filter := t.convertToFirestoreFilter(simplifiedFilter); filter != nil {
|
||||
query = query.WhereEntity(filter)
|
||||
}
|
||||
}
|
||||
|
||||
// Process select fields
|
||||
selectFields, err := t.processSelectFields(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(selectFields) > 0 {
|
||||
query = query.Select(selectFields...)
|
||||
}
|
||||
|
||||
// Process and apply ordering
|
||||
orderBy, err := t.getOrderBy(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if orderBy != nil {
|
||||
query = query.OrderBy(orderBy.Field, orderBy.GetDirection())
|
||||
}
|
||||
|
||||
// Process and apply limit
|
||||
limit, err := t.getLimit(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query = query.Limit(limit)
|
||||
|
||||
// Apply analyze options if enabled
|
||||
if t.AnalyzeQuery {
|
||||
query = query.WithRunOptions(firestoreapi.ExplainOptions{
|
||||
Analyze: true,
|
||||
})
|
||||
}
|
||||
|
||||
return &query, nil
|
||||
}
|
||||
|
||||
// convertToFirestoreFilter converts simplified filter format to Firestore EntityFilter
|
||||
func (t Tool) convertToFirestoreFilter(filter SimplifiedFilter) firestoreapi.EntityFilter {
|
||||
// Handle AND filters
|
||||
if len(filter.And) > 0 {
|
||||
filters := make([]firestoreapi.EntityFilter, 0, len(filter.And))
|
||||
for _, f := range filter.And {
|
||||
if converted := t.convertToFirestoreFilter(f); converted != nil {
|
||||
filters = append(filters, converted)
|
||||
}
|
||||
}
|
||||
if len(filters) > 0 {
|
||||
return firestoreapi.AndFilter{Filters: filters}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle OR filters
|
||||
if len(filter.Or) > 0 {
|
||||
filters := make([]firestoreapi.EntityFilter, 0, len(filter.Or))
|
||||
for _, f := range filter.Or {
|
||||
if converted := t.convertToFirestoreFilter(f); converted != nil {
|
||||
filters = append(filters, converted)
|
||||
}
|
||||
}
|
||||
if len(filters) > 0 {
|
||||
return firestoreapi.OrFilter{Filters: filters}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle simple property filter
|
||||
if filter.Field != "" && filter.Op != "" && filter.Value != nil {
|
||||
if validOperators[filter.Op] {
|
||||
// Convert the value using the Firestore native JSON converter
|
||||
convertedValue, err := util.JSONToFirestoreValue(filter.Value, t.Client)
|
||||
if err != nil {
|
||||
// If conversion fails, use the original value
|
||||
convertedValue = filter.Value
|
||||
}
|
||||
|
||||
return firestoreapi.PropertyFilter{
|
||||
Path: filter.Field,
|
||||
Operator: filter.Op,
|
||||
Value: convertedValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// processSelectFields processes the select fields with parameter substitution
|
||||
func (t Tool) processSelectFields(params map[string]any) ([]string, error) {
|
||||
var selectFields []string
|
||||
|
||||
// Process configured select fields with template substitution
|
||||
for _, field := range t.SelectTemplate {
|
||||
// Check if it's a template
|
||||
if strings.Contains(field, "{{") {
|
||||
processed, err := tools.PopulateTemplate("selectField", field, params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if processed != "" {
|
||||
// The processed field might be an array format [a b c] or a single value
|
||||
trimmedProcessed := strings.TrimSpace(processed)
|
||||
|
||||
// Check if it's in array format [a b c]
|
||||
if strings.HasPrefix(trimmedProcessed, "[") && strings.HasSuffix(trimmedProcessed, "]") {
|
||||
// Remove brackets and split by spaces
|
||||
arrayContent := strings.TrimPrefix(trimmedProcessed, "[")
|
||||
arrayContent = strings.TrimSuffix(arrayContent, "]")
|
||||
fields := strings.Fields(arrayContent) // Fields splits by any whitespace
|
||||
for _, f := range fields {
|
||||
if f != "" {
|
||||
selectFields = append(selectFields, f)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
selectFields = append(selectFields, processed)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
selectFields = append(selectFields, field)
|
||||
}
|
||||
}
|
||||
|
||||
return selectFields, nil
|
||||
}
|
||||
|
||||
// getOrderBy processes the orderBy configuration with parameter substitution
|
||||
func (t Tool) getOrderBy(params map[string]any) (*OrderByConfig, error) {
|
||||
if t.OrderByTemplate == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
orderBy := &OrderByConfig{}
|
||||
|
||||
// Process field
|
||||
field, err := t.getOrderByForKey("field", params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
orderBy.Field = field
|
||||
|
||||
// Process direction
|
||||
direction, err := t.getOrderByForKey("direction", params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
orderBy.Direction = direction
|
||||
|
||||
if orderBy.Field == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return orderBy, nil
|
||||
}
|
||||
|
||||
func (t Tool) getOrderByForKey(key string, params map[string]any) (string, error) {
|
||||
value, ok := t.OrderByTemplate[key].(string)
|
||||
if !ok {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
processedValue, err := tools.PopulateTemplate(fmt.Sprintf("orderBy%s", key), value, params)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return processedValue, nil
|
||||
}
|
||||
|
||||
// getLimit processes the limit field with parameter substitution
|
||||
func (t Tool) getLimit(params map[string]any) (int, error) {
|
||||
limit := defaultLimit
|
||||
if t.LimitTemplate != "" {
|
||||
processedValue, err := tools.PopulateTemplate("limit", t.LimitTemplate, params)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Try to parse as integer
|
||||
if processedValue != "" {
|
||||
parsedLimit, err := strconv.Atoi(processedValue)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf(errLimitParseFailed, processedValue, err)
|
||||
}
|
||||
limit = parsedLimit
|
||||
}
|
||||
}
|
||||
return limit, nil
|
||||
}
|
||||
|
||||
// executeQuery runs the query and formats the results
|
||||
func (t Tool) executeQuery(ctx context.Context, query *firestoreapi.Query) (any, error) {
|
||||
docIterator := query.Documents(ctx)
|
||||
docs, err := docIterator.GetAll()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf(errQueryExecutionFailed, err)
|
||||
}
|
||||
|
||||
// Convert results to structured format
|
||||
results := make([]QueryResult, len(docs))
|
||||
for i, doc := range docs {
|
||||
results[i] = QueryResult{
|
||||
ID: doc.Ref.ID,
|
||||
Path: doc.Ref.Path,
|
||||
Data: doc.Data(),
|
||||
CreateTime: doc.CreateTime,
|
||||
UpdateTime: doc.UpdateTime,
|
||||
ReadTime: doc.ReadTime,
|
||||
}
|
||||
}
|
||||
|
||||
// Return with explain metrics if requested
|
||||
if t.AnalyzeQuery {
|
||||
explainMetrics, err := t.getExplainMetrics(docIterator)
|
||||
if err == nil && explainMetrics != nil {
|
||||
response := QueryResponse{
|
||||
Documents: results,
|
||||
ExplainMetrics: explainMetrics,
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// getExplainMetrics extracts explain metrics from the query iterator
|
||||
func (t Tool) getExplainMetrics(docIterator *firestoreapi.DocumentIterator) (map[string]any, error) {
|
||||
explainMetrics, err := docIterator.ExplainMetrics()
|
||||
if err != nil || explainMetrics == nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
metricsData := make(map[string]any)
|
||||
|
||||
// Add plan summary if available
|
||||
if explainMetrics.PlanSummary != nil {
|
||||
planSummary := make(map[string]any)
|
||||
planSummary["indexesUsed"] = explainMetrics.PlanSummary.IndexesUsed
|
||||
metricsData["planSummary"] = planSummary
|
||||
}
|
||||
|
||||
// Add execution stats if available
|
||||
if explainMetrics.ExecutionStats != nil {
|
||||
executionStats := make(map[string]any)
|
||||
executionStats["resultsReturned"] = explainMetrics.ExecutionStats.ResultsReturned
|
||||
executionStats["readOperations"] = explainMetrics.ExecutionStats.ReadOperations
|
||||
|
||||
if explainMetrics.ExecutionStats.ExecutionDuration != nil {
|
||||
executionStats["executionDuration"] = explainMetrics.ExecutionStats.ExecutionDuration.String()
|
||||
}
|
||||
|
||||
if explainMetrics.ExecutionStats.DebugStats != nil {
|
||||
executionStats["debugStats"] = *explainMetrics.ExecutionStats.DebugStats
|
||||
}
|
||||
|
||||
metricsData["executionStats"] = executionStats
|
||||
}
|
||||
|
||||
return metricsData, nil
|
||||
}
|
||||
|
||||
// ParseParams parses and validates input parameters
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
// Manifest returns the tool manifest
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
// McpManifest returns the MCP manifest
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
// Authorized checks if the tool is authorized based on verified auth services
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization() bool {
|
||||
return false
|
||||
}
|
||||
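The deleted `firestore-query` tool above assembles its query from property filters (combined with `AndFilter`/`OrFilter`), optional ordering, and a limit, then fetches the matching documents. A condensed sketch of that construction using the same Firestore client APIs, with placeholder collection, fields, and project:

```go
// Condensed sketch of the query construction the deleted tool performed;
// collection name, field names, values, and project are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	firestoreapi "cloud.google.com/go/firestore"
)

func main() {
	ctx := context.Background()
	client, err := firestoreapi.NewClient(ctx, "my-project") // placeholder
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	// Two property filters joined with AND, mirroring the simplified filter format.
	filter := firestoreapi.AndFilter{
		Filters: []firestoreapi.EntityFilter{
			firestoreapi.PropertyFilter{Path: "category", Operator: "==", Value: "books"},
			firestoreapi.PropertyFilter{Path: "price", Operator: "<=", Value: 25.0},
		},
	}

	query := client.Collection("products").Query.
		WhereEntity(filter).
		OrderBy("price", firestoreapi.Desc).
		Limit(100)

	docs, err := query.Documents(ctx).GetAll()
	if err != nil {
		log.Fatal(err)
	}
	for _, doc := range docs {
		fmt.Println(doc.Ref.ID, doc.Data())
	}
}
```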
@@ -1,492 +0,0 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorequery_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreQuery(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example with parameterized collection path",
|
||||
in: `
|
||||
tools:
|
||||
query_users_tool:
|
||||
kind: firestore-query
|
||||
source: my-firestore-instance
|
||||
description: Query users collection with parameterized path
|
||||
collectionPath: "users/{{.userId}}/documents"
|
||||
parameters:
|
||||
- name: userId
|
||||
type: string
|
||||
description: The user ID to query documents for
|
||||
required: true
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"query_users_tool": firestorequery.Config{
|
||||
Name: "query_users_tool",
|
||||
Kind: "firestore-query",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "Query users collection with parameterized path",
|
||||
CollectionPath: "users/{{.userId}}/documents",
|
||||
AuthRequired: []string{},
|
||||
Parameters: tools.Parameters{
|
||||
tools.NewStringParameterWithRequired("userId", "The user ID to query documents for", true),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with parameterized filters",
|
||||
in: `
|
||||
tools:
|
||||
query_products_tool:
|
||||
kind: firestore-query
|
||||
source: prod-firestore
|
||||
description: Query products with dynamic filters
|
||||
collectionPath: "products"
|
||||
filters: |
|
||||
{
|
||||
"and": [
|
||||
{"field": "category", "op": "==", "value": {"stringValue": "{{.category}}"}},
|
||||
{"field": "price", "op": "<=", "value": {"doubleValue": {{.maxPrice}}}}
|
||||
]
|
||||
}
|
||||
parameters:
|
||||
- name: category
|
||||
type: string
|
||||
description: Product category to filter by
|
||||
required: true
|
||||
- name: maxPrice
|
||||
type: float
|
||||
description: Maximum price for products
|
||||
required: true
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"query_products_tool": firestorequery.Config{
|
||||
Name: "query_products_tool",
|
||||
Kind: "firestore-query",
|
||||
Source: "prod-firestore",
|
||||
Description: "Query products with dynamic filters",
|
||||
CollectionPath: "products",
|
||||
Filters: `{
|
||||
"and": [
|
||||
{"field": "category", "op": "==", "value": {"stringValue": "{{.category}}"}},
|
||||
{"field": "price", "op": "<=", "value": {"doubleValue": {{.maxPrice}}}}
|
||||
]
|
||||
}
|
||||
`,
|
||||
AuthRequired: []string{},
|
||||
Parameters: tools.Parameters{
|
||||
tools.NewStringParameterWithRequired("category", "Product category to filter by", true),
|
||||
tools.NewFloatParameterWithRequired("maxPrice", "Maximum price for products", true),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with select fields and orderBy",
|
||||
in: `
|
||||
tools:
|
||||
query_orders_tool:
|
||||
kind: firestore-query
|
||||
source: orders-firestore
|
||||
description: Query orders with field selection
|
||||
collectionPath: "orders"
|
||||
select:
|
||||
- orderId
|
||||
- customerName
|
||||
- totalAmount
|
||||
orderBy:
|
||||
field: "{{.sortField}}"
|
direction: "DESCENDING"
limit: 50
parameters:
- name: sortField
type: string
description: Field to sort by
required: true
`,
want: server.ToolConfigs{
"query_orders_tool": firestorequery.Config{
Name: "query_orders_tool",
Kind: "firestore-query",
Source: "orders-firestore",
Description: "Query orders with field selection",
CollectionPath: "orders",
Select: []string{"orderId", "customerName", "totalAmount"},
OrderBy: map[string]any{
"field": "{{.sortField}}",
"direction": "DESCENDING",
},
Limit: "50",
AuthRequired: []string{},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("sortField", "Field to sort by", true),
},
},
},
},
{
desc: "with auth requirements and complex filters",
in: `
tools:
secure_query_tool:
kind: firestore-query
source: secure-firestore
description: Query with authentication and complex filters
collectionPath: "{{.collection}}"
filters: |
{
"or": [
{
"and": [
{"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
{"field": "priority", "op": ">=", "value": {"integerValue": "{{.minPriority}}"}}
]
},
{"field": "urgent", "op": "==", "value": {"booleanValue": true}}
]
}
analyzeQuery: true
authRequired:
- google-auth-service
- api-key-service
parameters:
- name: collection
type: string
description: Collection name to query
required: true
- name: status
type: string
description: Status to filter by
required: true
- name: minPriority
type: integer
description: Minimum priority level
default: 1
`,
want: server.ToolConfigs{
"secure_query_tool": firestorequery.Config{
Name: "secure_query_tool",
Kind: "firestore-query",
Source: "secure-firestore",
Description: "Query with authentication and complex filters",
CollectionPath: "{{.collection}}",
Filters: `{
"or": [
{
"and": [
{"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
{"field": "priority", "op": ">=", "value": {"integerValue": "{{.minPriority}}"}}
]
},
{"field": "urgent", "op": "==", "value": {"booleanValue": true}}
]
}
`,
AnalyzeQuery: true,
AuthRequired: []string{"google-auth-service", "api-key-service"},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("collection", "Collection name to query", true),
tools.NewStringParameterWithRequired("status", "Status to filter by", true),
tools.NewIntParameterWithDefault("minPriority", 1, "Minimum priority level"),
},
},
},
},
{
desc: "with Firestore native JSON value types and template parameters",
in: `
tools:
query_with_typed_values:
kind: firestore-query
source: typed-firestore
description: Query with Firestore native JSON value types
collectionPath: "countries"
filters: |
{
"or": [
{"field": "continent", "op": "==", "value": {"stringValue": "{{.continent}}"}},
{
"and": [
{"field": "area", "op": ">", "value": {"integerValue": "2000000"}},
{"field": "area", "op": "<", "value": {"integerValue": "3000000"}},
{"field": "population", "op": ">=", "value": {"integerValue": "{{.minPopulation}}"}},
{"field": "gdp", "op": ">", "value": {"doubleValue": {{.minGdp}}}},
{"field": "isActive", "op": "==", "value": {"booleanValue": {{.isActive}}}},
{"field": "lastUpdated", "op": ">=", "value": {"timestampValue": "{{.startDate}}"}}
]
}
]
}
parameters:
- name: continent
type: string
description: Continent to filter by
required: true
- name: minPopulation
type: string
description: Minimum population as string
required: true
- name: minGdp
type: float
description: Minimum GDP value
required: true
- name: isActive
type: boolean
description: Filter by active status
required: true
- name: startDate
type: string
description: Start date in RFC3339 format
required: true
`,
want: server.ToolConfigs{
"query_with_typed_values": firestorequery.Config{
Name: "query_with_typed_values",
Kind: "firestore-query",
Source: "typed-firestore",
Description: "Query with Firestore native JSON value types",
CollectionPath: "countries",
Filters: `{
"or": [
{"field": "continent", "op": "==", "value": {"stringValue": "{{.continent}}"}},
{
"and": [
{"field": "area", "op": ">", "value": {"integerValue": "2000000"}},
{"field": "area", "op": "<", "value": {"integerValue": "3000000"}},
{"field": "population", "op": ">=", "value": {"integerValue": "{{.minPopulation}}"}},
{"field": "gdp", "op": ">", "value": {"doubleValue": {{.minGdp}}}},
{"field": "isActive", "op": "==", "value": {"booleanValue": {{.isActive}}}},
{"field": "lastUpdated", "op": ">=", "value": {"timestampValue": "{{.startDate}}"}}
]
}
]
}
`,
AuthRequired: []string{},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("continent", "Continent to filter by", true),
tools.NewStringParameterWithRequired("minPopulation", "Minimum population as string", true),
tools.NewFloatParameterWithRequired("minGdp", "Minimum GDP value", true),
tools.NewBooleanParameterWithRequired("isActive", "Filter by active status", true),
tools.NewStringParameterWithRequired("startDate", "Start date in RFC3339 format", true),
},
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
// Parse contents
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
})
}
}

func TestParseFromYamlMultipleQueryTools(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
in := `
tools:
query_user_posts:
kind: firestore-query
source: social-firestore
description: Query user posts with filtering
collectionPath: "users/{{.userId}}/posts"
filters: |
{
"and": [
{"field": "visibility", "op": "==", "value": {"stringValue": "{{.visibility}}"}},
{"field": "createdAt", "op": ">=", "value": {"timestampValue": "{{.startDate}}"}}
]
}
select:
- title
- content
- likes
orderBy:
field: createdAt
direction: "{{.sortOrder}}"
limit: 20
parameters:
- name: userId
type: string
description: User ID whose posts to query
required: true
- name: visibility
type: string
description: Post visibility (public, private, friends)
required: true
- name: startDate
type: string
description: Start date for posts
required: true
- name: sortOrder
type: string
description: Sort order (ASCENDING or DESCENDING)
default: "DESCENDING"
query_inventory:
kind: firestore-query
source: inventory-firestore
description: Query inventory items
collectionPath: "warehouses/{{.warehouseId}}/inventory"
filters: |
{
"field": "quantity", "op": "<", "value": {"integerValue": "{{.threshold}}"}}
parameters:
- name: warehouseId
type: string
description: Warehouse ID to check inventory
required: true
- name: threshold
type: integer
description: Quantity threshold for low stock
required: true
query_transactions:
kind: firestore-query
source: finance-firestore
description: Query financial transactions
collectionPath: "accounts/{{.accountId}}/transactions"
filters: |
{
"or": [
{"field": "type", "op": "==", "value": {"stringValue": "{{.transactionType}}"}},
{"field": "amount", "op": ">", "value": {"doubleValue": {{.minAmount}}}}
]
}
analyzeQuery: true
authRequired:
- finance-auth
parameters:
- name: accountId
type: string
description: Account ID for transactions
required: true
- name: transactionType
type: string
description: Type of transaction
default: "all"
- name: minAmount
type: float
description: Minimum transaction amount
default: 0
`
want := server.ToolConfigs{
"query_user_posts": firestorequery.Config{
Name: "query_user_posts",
Kind: "firestore-query",
Source: "social-firestore",
Description: "Query user posts with filtering",
CollectionPath: "users/{{.userId}}/posts",
Filters: `{
"and": [
{"field": "visibility", "op": "==", "value": {"stringValue": "{{.visibility}}"}},
{"field": "createdAt", "op": ">=", "value": {"timestampValue": "{{.startDate}}"}}
]
}
`,
Select: []string{"title", "content", "likes"},
OrderBy: map[string]any{
"field": "createdAt",
"direction": "{{.sortOrder}}",
},
Limit: "20",
AuthRequired: []string{},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("userId", "User ID whose posts to query", true),
tools.NewStringParameterWithRequired("visibility", "Post visibility (public, private, friends)", true),
tools.NewStringParameterWithRequired("startDate", "Start date for posts", true),
tools.NewStringParameterWithDefault("sortOrder", "DESCENDING", "Sort order (ASCENDING or DESCENDING)"),
},
},
"query_inventory": firestorequery.Config{
Name: "query_inventory",
Kind: "firestore-query",
Source: "inventory-firestore",
Description: "Query inventory items",
CollectionPath: "warehouses/{{.warehouseId}}/inventory",
Filters: `{
"field": "quantity", "op": "<", "value": {"integerValue": "{{.threshold}}"}}
`,
AuthRequired: []string{},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("warehouseId", "Warehouse ID to check inventory", true),
tools.NewIntParameterWithRequired("threshold", "Quantity threshold for low stock", true),
},
},
"query_transactions": firestorequery.Config{
Name: "query_transactions",
Kind: "firestore-query",
Source: "finance-firestore",
Description: "Query financial transactions",
CollectionPath: "accounts/{{.accountId}}/transactions",
Filters: `{
"or": [
{"field": "type", "op": "==", "value": {"stringValue": "{{.transactionType}}"}},
{"field": "amount", "op": ">", "value": {"doubleValue": {{.minAmount}}}}
]
}
`,
AnalyzeQuery: true,
AuthRequired: []string{"finance-auth"},
Parameters: tools.Parameters{
tools.NewStringParameterWithRequired("accountId", "Account ID for transactions", true),
tools.NewStringParameterWithDefault("transactionType", "all", "Type of transaction"),
tools.NewFloatParameterWithDefault("minAmount", 0, "Minimum transaction amount"),
},
},
}

got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
// Parse contents
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
}
@@ -195,5 +195,5 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
}

func (t Tool) RequiresClientAuthorization() bool {
return t.UseClientOAuth
return false
}

@@ -19,7 +19,6 @@ import (
"errors"
"fmt"
"slices"
"strings"

yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
@@ -67,14 +66,6 @@ type ToolConfig interface {

type AccessToken string

func (token AccessToken) ParseBearerToken() (string, error) {
headerParts := strings.Split(string(token), " ")
if len(headerParts) != 2 || strings.ToLower(headerParts[0]) != "bearer" {
return "", fmt.Errorf("authorization header must be in the format 'Bearer <token>': %w", ErrUnauthorized)
}
return headerParts[1], nil
}

type Tool interface {
Invoke(context.Context, ParamValues, AccessToken) (any, error)
ParseParams(map[string]any, map[string]map[string]any) (ParamValues, error)

@@ -1,178 +0,0 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yugabytedbsql

import (
"context"
"fmt"

yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/yugabyte/pgx/v5/pgxpool"
)

const kind string = "yugabytedb-sql"

func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}

type compatibleSource interface {
YugabyteDBPool() *pgxpool.Pool
}

var compatibleSources = [...]string{yugabytedb.SourceKind}

type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description" validate:"required"`
Statement string `yaml:"statement" validate:"required"`
AuthRequired []string `yaml:"authRequired"`
Parameters tools.Parameters `yaml:"parameters"`
TemplateParameters tools.Parameters `yaml:"templateParameters"`
}

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// verify source exists
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}

// verify the source is compatible
s, ok := rawS.(compatibleSource)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}

allParameters, paramManifest, paramMcpManifest, err := tools.ProcessParameters(cfg.TemplateParameters, cfg.Parameters)
if err != nil {
return nil, err
}

mcpManifest := tools.McpManifest{
Name: cfg.Name,
Description: cfg.Description,
InputSchema: paramMcpManifest,
}

// finish tool setup
t := Tool{
Name: cfg.Name,
Kind: kind,
Parameters: cfg.Parameters,
TemplateParameters: cfg.TemplateParameters,
AllParams: allParameters,
Statement: cfg.Statement,
AuthRequired: cfg.AuthRequired,
Pool: s.YugabyteDBPool(),
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
mcpManifest: mcpManifest,
}
return t, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
AuthRequired []string `yaml:"authRequired"`
Parameters tools.Parameters `yaml:"parameters"`
TemplateParameters tools.Parameters `yaml:"templateParameters"`
AllParams tools.Parameters `yaml:"allParams"`

Pool *pgxpool.Pool
Statement string
manifest tools.Manifest
mcpManifest tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
if err != nil {
return nil, fmt.Errorf("unable to extract template params %w", err)
}

newParams, err := tools.GetParams(t.Parameters, paramsMap)
if err != nil {
return nil, fmt.Errorf("unable to extract standard params %w", err)
}
sliceParams := newParams.AsSlice()
results, err := t.Pool.Query(ctx, newStatement, sliceParams...)
if err != nil {
return nil, fmt.Errorf("unable to execute query: %w", err)
}

fields := results.FieldDescriptions()

var out []any
for results.Next() {
v, err := results.Values()
if err != nil {
return nil, fmt.Errorf("unable to parse row: %w", err)
}
vMap := make(map[string]any)
for i, f := range fields {
vMap[f.Name] = v[i]
}
out = append(out, vMap)
}

return out, nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
return tools.ParseParams(t.AllParams, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
return false
}
@@ -1,214 +0,0 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yugabytedbsql_test

import (
"testing"

yaml "github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
)

func TestParseFromYamlYugabyteDBSQL(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
tcs := []struct {
desc string
in string
want server.ToolConfigs
}{
{
desc: "basic valid config",
in: `
tools:
hotel_search:
kind: yugabytedb-sql
source: yb-source
description: search hotels by city
statement: |
SELECT * FROM hotels WHERE city = $1;
authRequired:
- auth-service-a
- auth-service-b
parameters:
- name: city
type: string
description: city name
authServices:
- name: auth-service-a
field: user_id
- name: auth-service-b
field: user_id
`,
want: server.ToolConfigs{
"hotel_search": yugabytedbsql.Config{
Name: "hotel_search",
Kind: "yugabytedb-sql",
Source: "yb-source",
Description: "search hotels by city",
Statement: "SELECT * FROM hotels WHERE city = $1;\n",
AuthRequired: []string{"auth-service-a", "auth-service-b"},
Parameters: []tools.Parameter{
tools.NewStringParameterWithAuth("city", "city name",
[]tools.ParamAuthService{
{Name: "auth-service-a", Field: "user_id"},
{Name: "auth-service-b", Field: "user_id"},
},
),
},
},
},
},
}

for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
})
}
}

func TestFailParseFromYamlYugabyteDBSQL(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
cases := []struct {
desc string
in string
}{
{
desc: "missing required field (statement)",
in: `
tools:
tool1:
kind: yugabytedb-sql
source: yb-source
description: incomplete config
`,
},
{
desc: "unknown field (foo)",
in: `
tools:
tool2:
kind: yugabytedb-sql
source: yb-source
description: test
statement: SELECT 1;
foo: bar
`,
},
}
for _, tc := range cases {
t.Run(tc.desc, func(t *testing.T) {
cfg := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &cfg)
if err == nil {
t.Fatalf("expected error but got none")
}
})
}
}

func TestParseFromYamlWithTemplateParamsYugabyteDB(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
tcs := []struct {
desc string
in string
want server.ToolConfigs
}{
{
desc: "basic example",
in: `
tools:
example_tool:
kind: yugabytedb-sql
source: my-yb-instance
description: some description
statement: |
SELECT * FROM SQL_STATEMENT;
parameters:
- name: name
type: string
description: some description
templateParameters:
- name: tableName
type: string
description: The table to select hotels from.
- name: fieldArray
type: array
description: The columns to return for the query.
items:
name: column
type: string
description: A column name that will be returned from the query.
`,
want: server.ToolConfigs{
"example_tool": yugabytedbsql.Config{
Name: "example_tool",
Kind: "yugabytedb-sql",
Source: "my-yb-instance",
Description: "some description",
Statement: "SELECT * FROM SQL_STATEMENT;\n",
AuthRequired: []string{},
Parameters: []tools.Parameter{
tools.NewStringParameter("name", "some description"),
},
TemplateParameters: []tools.Parameter{
tools.NewStringParameter("tableName", "The table to select hotels from."),
tools.NewArrayParameter("fieldArray", "The columns to return for the query.", tools.NewStringParameter("column", "A column name that will be returned from the query.")),
},
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
// Parse contents
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
})
}

}
@@ -29,7 +29,6 @@ import (

bigqueryapi "cloud.google.com/go/bigquery"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
"golang.org/x/oauth2/google"
@@ -73,7 +72,7 @@ func initBigQueryConnection(project string) (*bigqueryapi.Client, error) {

func TestBigQueryToolEndpoints(t *testing.T) {
sourceConfig := getBigQueryVars(t)
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
defer cancel()

var args []string
@@ -360,11 +359,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-exec-sql-tool"] = map[string]any{
"kind": "bigquery-execute-sql",
"source": "my-client-auth-source",
"description": "Tool to execute sql",
}
tools["my-forecast-tool"] = map[string]any{
"kind": "bigquery-forecast",
"source": "my-instance",
@@ -378,11 +372,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-forecast-tool"] = map[string]any{
"kind": "bigquery-forecast",
"source": "my-client-auth-source",
"description": "Tool to forecast time series data with auth.",
}
tools["my-list-dataset-ids-tool"] = map[string]any{
"kind": "bigquery-list-dataset-ids",
"source": "my-instance",
@@ -396,11 +385,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-list-dataset-ids-tool"] = map[string]any{
"kind": "bigquery-list-dataset-ids",
"source": "my-client-auth-source",
"description": "Tool to list dataset",
}
tools["my-get-dataset-info-tool"] = map[string]any{
"kind": "bigquery-get-dataset-info",
"source": "my-instance",
@@ -414,11 +398,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-get-dataset-info-tool"] = map[string]any{
"kind": "bigquery-get-dataset-info",
"source": "my-client-auth-source",
"description": "Tool to show dataset metadata",
}
tools["my-list-table-ids-tool"] = map[string]any{
"kind": "bigquery-list-table-ids",
"source": "my-instance",
@@ -432,11 +411,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-list-table-ids-tool"] = map[string]any{
"kind": "bigquery-list-table-ids",
"source": "my-client-auth-source",
"description": "Tool to list table within a dataset",
}
tools["my-get-table-info-tool"] = map[string]any{
"kind": "bigquery-get-table-info",
"source": "my-instance",
@@ -450,11 +424,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-get-table-info-tool"] = map[string]any{
"kind": "bigquery-get-table-info",
"source": "my-client-auth-source",
"description": "Tool to show dataset metadata",
}
tools["my-conversational-analytics-tool"] = map[string]any{
"kind": "bigquery-conversational-analytics",
"source": "my-instance",
@@ -468,11 +437,6 @@ func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[str
"my-google-auth",
},
}
tools["my-client-auth-conversational-analytics-tool"] = map[string]any{
"kind": "bigquery-conversational-analytics",
"source": "my-client-auth-source",
"description": "Tool to ask BigQuery conversational analytics",
}
config["tools"] = tools
return config
}
@@ -537,13 +501,6 @@ func runBigQueryExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamW
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

// Test tool invoke endpoint
invokeTcs := []struct {
name string
@@ -637,29 +594,6 @@ func runBigQueryExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamW
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: true,
},
{
name: "Invoke my-client-auth-exec-sql-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
want: "[{\"f0_\":1}]",
isErr: false,
},
{
name: "Invoke my-client-auth-exec-sql-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: true,
},
{

name: "Invoke my-client-auth-exec-sql-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -820,13 +754,6 @@ func runBigQueryForecastToolInvokeTest(t *testing.T, tableName string) {
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

historyDataTable := strings.ReplaceAll(tableName, "`", "")
historyDataQuery := fmt.Sprintf("SELECT ts, data, id FROM %s", tableName)

@@ -884,29 +811,6 @@ func runBigQueryForecastToolInvokeTest(t *testing.T, tableName string) {
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
isErr: true,
},
{
name: "Invoke my-client-auth-forecast-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
want: `"forecast_timestamp"`,
isErr: false,
},
{
name: "Invoke my-client-auth-forecast-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
isErr: true,
},
{

name: "Invoke my-client-auth-forecast-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -1037,13 +941,6 @@ func runBigQueryListDatasetToolInvokeTest(t *testing.T, datasetWant string) {
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

// Test tool invoke endpoint
invokeTcs := []struct {
name string
@@ -1084,29 +981,6 @@ func runBigQueryListDatasetToolInvokeTest(t *testing.T, datasetWant string) {
isErr: false,
want: datasetWant,
},
{
name: "Invoke my-client-auth-list-dataset-ids-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: false,
want: datasetWant,
},
{
name: "Invoke my-client-auth-list-dataset-ids-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
{

name: "Invoke my-client-auth-list-dataset-ids-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -1159,13 +1033,6 @@ func runBigQueryGetDatasetInfoToolInvokeTest(t *testing.T, datasetName, datasetI
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

// Test tool invoke endpoint
invokeTcs := []struct {
name string
@@ -1234,29 +1101,6 @@ func runBigQueryGetDatasetInfoToolInvokeTest(t *testing.T, datasetName, datasetI
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
{
name: "Invoke my-client-auth-get-dataset-info-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
want: datasetInfoWant,
isErr: false,
},
{
name: "Invoke my-client-auth-get-dataset-info-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
{

name: "Invoke my-client-auth-get-dataset-info-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -1309,13 +1153,6 @@ func runBigQueryListTableIdsToolInvokeTest(t *testing.T, datasetName, tablename_
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

// Test tool invoke endpoint
invokeTcs := []struct {
name string
@@ -1384,29 +1221,6 @@ func runBigQueryListTableIdsToolInvokeTest(t *testing.T, datasetName, tablename_
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
{
name: "Invoke my-client-auth-list-table-ids-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
want: tablename_want,
isErr: false,
},
{
name: "Invoke my-client-auth-list-table-ids-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
{

name: "Invoke my-client-auth-list-table-ids-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -1459,13 +1273,6 @@ func runBigQueryGetTableInfoToolInvokeTest(t *testing.T, datasetName, tableName,
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

// Test tool invoke endpoint
invokeTcs := []struct {
name string
@@ -1534,29 +1341,6 @@ func runBigQueryGetTableInfoToolInvokeTest(t *testing.T, datasetName, tableName,
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
isErr: true,
},
{
name: "Invoke my-client-auth-get-table-info-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
want: tableInfoWant,
isErr: false,
},
{
name: "Invoke my-client-auth-get-table-info-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
isErr: true,
},
{

name: "Invoke my-client-auth-get-table-info-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
@@ -1609,13 +1393,6 @@ func runBigQueryConversationalAnalyticsInvokeTest(t *testing.T, datasetName, tab
t.Fatalf("error getting Google ID token: %s", err)
}

// Get access token
accessToken, err := sources.GetIAMAccessToken(t.Context())
if err != nil {
t.Fatalf("error getting access token from ADC: %s", err)
}
accessToken = "Bearer " + accessToken

tableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, datasetName, tableName)

invokeTcs := []struct {
@@ -1655,38 +1432,6 @@ func runBigQueryConversationalAnalyticsInvokeTest(t *testing.T, datasetName, tab
requestBody: bytes.NewBuffer([]byte(`{"user_query_with_context": "What are the names in the table?"}`)),
isErr: true,
},
{
name: "Invoke my-client-auth-conversational-analytics-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
requestHeader: map[string]string{"Authorization": accessToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
`{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
tableRefsJSON,
))),
want: "[{\"f0_\":1}]",
isErr: false,
},
{
name: "Invoke my-client-auth-conversational-analytics-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
`{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
tableRefsJSON,
))),
isErr: true,
},
{

name: "Invoke my-client-auth-conversational-analytics-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
`{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
tableRefsJSON,
))),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {

@@ -1,284 +0,0 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsql

import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"reflect"
"regexp"
"sync"
"testing"
"time"

"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"

_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
)

var (
cloudsqlWaitToolKind = "cloud-sql-wait-for-operation"
)

type cloudsqlOperation struct {
Name string `json:"name"`
Status string `json:"status"`
TargetLink string `json:"targetLink"`
OperationType string `json:"operationType"`
Error *struct {
Errors []struct {
Code string `json:"code"`
Message string `json:"message"`
} `json:"errors"`
} `json:"error,omitempty"`
}

type cloudsqlInstance struct {
Region string `json:"region"`
DatabaseVersion string `json:"databaseVersion"`
}

type cloudsqlHandler struct {
mu sync.Mutex
operations map[string]*cloudsqlOperation
instances map[string]*cloudsqlInstance
}

func (h *cloudsqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
h.mu.Lock()
defer h.mu.Unlock()

if match, _ := regexp.MatchString("/v1/projects/p1/operations/.*", r.URL.Path); match {
parts := regexp.MustCompile("/").Split(r.URL.Path, -1)
opName := parts[len(parts)-1]

op, ok := h.operations[opName]
if !ok {
http.NotFound(w, r)
return
}

if op.Status != "DONE" {
op.Status = "DONE"
}

w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(op); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
} else if match, _ := regexp.MatchString("/v1/projects/p1/instances/.*", r.URL.Path); match {
parts := regexp.MustCompile("/").Split(r.URL.Path, -1)
instanceName := parts[len(parts)-1]

instance, ok := h.instances[instanceName]
if !ok {
http.NotFound(w, r)
return
}
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(instance); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
} else {
http.NotFound(w, r)
}
}

func TestCloudSQLWaitToolEndpoints(t *testing.T) {
h := &cloudsqlHandler{
operations: map[string]*cloudsqlOperation{
"op1": {Name: "op1", Status: "PENDING", OperationType: "CREATE_DATABASE"},
"op2": {Name: "op2", Status: "PENDING", OperationType: "CREATE_DATABASE", Error: &struct {
Errors []struct {
Code string `json:"code"`
Message string `json:"message"`
} `json:"errors"`
}{
Errors: []struct {
Code string `json:"code"`
Message string `json:"message"`
}{
{Code: "ERROR_CODE", Message: "failed"},
},
}},
"op3": {Name: "op3", Status: "PENDING", OperationType: "CREATE"},
},
instances: map[string]*cloudsqlInstance{
"i1": {Region: "r1", DatabaseVersion: "POSTGRES_13"},
},
}
server := httptest.NewServer(h)
defer server.Close()

h.operations["op1"].TargetLink = fmt.Sprintf("%s/v1/projects/p1/instances/i1/databases/d1", server.URL)
h.operations["op2"].TargetLink = fmt.Sprintf("%s/v1/projects/p1/instances/i2/databases/d2", server.URL)
h.operations["op3"].TargetLink = fmt.Sprintf("%s/v1/projects/p1/instances/i1", server.URL)

ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()

var args []string

toolsFile := getCloudSQLWaitToolsConfig(server.URL)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()

waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}

tcs := []struct {
name string
toolName string
body string
want string
expectError bool
wantSubstring bool
}{
{
name: "successful operation",
toolName: "wait-for-op1",
body: `{"project": "p1", "operation": "op1"}`,
want: "Your Cloud SQL resource is ready",
wantSubstring: true,
},
{
name: "failed operation",
toolName: "wait-for-op2",
body: `{"project": "p1", "operation": "op2"}`,
expectError: true,
},
{
name: "non-database create operation",
toolName: "wait-for-op3",
body: `{"project": "p1", "operation": "op3"}`,
want: `{"name":"op3","status":"DONE","targetLink":"` + h.operations["op3"].TargetLink + `","operationType":"CREATE"}`,
},
}

for _, tc := range tcs {
t.Run(tc.name, func(t *testing.T) {
api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()

if tc.expectError {
if resp.StatusCode == http.StatusOK {
t.Fatal("expected error but got status 200")
}
return
}

if resp.StatusCode != http.StatusOK {
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}

if tc.wantSubstring {
var result struct {
Result string `json:"result"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
t.Fatalf("failed to decode response: %v", err)
}

if !bytes.Contains([]byte(result.Result), []byte(tc.want)) {
t.Fatalf("unexpected result: got %q, want substring %q", result.Result, tc.want)
}
return
}

var result struct {
Result string `json:"result"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
t.Fatalf("failed to decode response: %v", err)
}

var tempString string
if err := json.Unmarshal([]byte(result.Result), &tempString); err != nil {
t.Fatalf("failed to unmarshal outer JSON string: %v", err)
}

var got, want map[string]any
if err := json.Unmarshal([]byte(tempString), &got); err != nil {
t.Fatalf("failed to unmarshal inner JSON object: %v", err)
}

if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
t.Fatalf("failed to unmarshal want: %v", err)
}

if !reflect.DeepEqual(got, want) {
t.Fatalf("unexpected result: got %+v, want %+v", got, want)
}
})
}
}

func getCloudSQLWaitToolsConfig(baseURL string) map[string]any {
return map[string]any{
"sources": map[string]any{
"test-source": map[string]any{
"kind": "http",
"baseUrl": baseURL,
},
},
"tools": map[string]any{
"wait-for-op1": map[string]any{
"kind": cloudsqlWaitToolKind,
"source": "test-source",
"description": "wait for op1",
"baseURL": baseURL,
"authRequired": []string{},
},
"wait-for-op2": map[string]any{
"kind": cloudsqlWaitToolKind,
"source": "test-source",
"description": "wait for op2",
"baseURL": baseURL,
"authRequired": []string{},
},
"wait-for-op3": map[string]any{
"kind": cloudsqlWaitToolKind,
"source": "test-source",
"description": "wait for op3",
"baseURL": baseURL,
"authRequired": []string{},
},
},
}
}
@@ -131,8 +131,6 @@ func TestFirestoreToolEndpoints(t *testing.T) {
// Run specific Firestore tool tests
runFirestoreGetDocumentsTest(t, docPath1, docPath2)
runFirestoreQueryCollectionTest(t, testCollectionName)
runFirestoreQueryTest(t, testCollectionName)
runFirestoreQuerySelectArrayTest(t, testCollectionName)
runFirestoreListCollectionsTest(t, testCollectionName, testSubCollectionName, docPath1)
runFirestoreAddDocumentsTest(t, testCollectionName)
runFirestoreUpdateDocumentTest(t, testCollectionName, testDocID1)
@@ -564,63 +562,6 @@ func getFirestoreToolsConfig(sourceConfig map[string]any) map[string]any {
"source": "my-instance",
"description": "Query a Firestore collection",
},
"firestore-query-param": map[string]any{
"kind": "firestore-query",
"source": "my-instance",
"description": "Query a Firestore collection with parameterizable filters",
"collectionPath": "{{.collection}}",
"filters": `{
"field": "age", "op": "{{.operator}}", "value": {"integerValue": "{{.ageValue}}"}
}`,
"limit": 10,
"parameters": []map[string]any{
{
"name": "collection",
"type": "string",
"description": "Collection to query",
"required": true,
},
{
"name": "operator",
"type": "string",
"description": "Comparison operator",
"required": true,
},
{
"name": "ageValue",
"type": "string",
"description": "Age value to compare",
"required": true,
},
},
},
"firestore-query-select-array": map[string]any{
"kind": "firestore-query",
"source": "my-instance",
"description": "Query with array-based select fields",
"collectionPath": "{{.collection}}",
"select": []string{"{{.fields}}"},
"limit": 10,
"parameters": []map[string]any{
{
"name": "collection",
"type": "string",
"description": "Collection to query",
"required": true,
},
{
"name": "fields",
"type": "array",
"description": "Fields to select",
"required": true,
"items": map[string]any{
"name": "field",
"type": "string",
"description": "field",
},
},
},
},
"firestore-get-rules": map[string]any{
"kind": "firestore-get-rules",
"source": "my-instance",
@@ -1415,246 +1356,6 @@ func runFirestoreDeleteDocumentsTest(t *testing.T, docPath string) {
}
}

func runFirestoreQueryTest(t *testing.T, collectionName string) {
invokeTcs := []struct {
name string
api string
requestBody io.Reader
wantRegex string
isErr bool
}{
{
name: "query with parameterized filters - age greater than",
api: "http://127.0.0.1:5000/api/tool/firestore-query-param/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
"collection": "%s",
"operator": ">",
"ageValue": "25"
}`, collectionName))),
wantRegex: `"name":"Alice"`,
isErr: false,
},
{
name: "query with parameterized filters - exact name match",
api: "http://127.0.0.1:5000/api/tool/firestore-query-param/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
"collection": "%s",
"operator": "==",
"ageValue": "25"
}`, collectionName))),
wantRegex: `"name":"Bob"`,
isErr: false,
},
{
name: "query with parameterized filters - age less than or equal",
api: "http://127.0.0.1:5000/api/tool/firestore-query-param/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
"collection": "%s",
"operator": "<=",
"ageValue": "29"
}`, collectionName))),
wantRegex: `"name":"Bob"`,
isErr: false,
},
{
name: "missing required parameter",
api: "http://127.0.0.1:5000/api/tool/firestore-query-param/invoke",
requestBody: bytes.NewBuffer([]byte(`{"collection": "test", "operator": ">"}`)),
isErr: true,
},
{
name: "query non-existent collection with parameters",
api: "http://127.0.0.1:5000/api/tool/firestore-query-param/invoke",
requestBody: bytes.NewBuffer([]byte(`{
"collection": "non-existent-collection",
"operator": "==",
"ageValue": "30"
}`)),
wantRegex: `^\[\]$`, // Empty array
isErr: false,
},
}

for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")

resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()

if resp.StatusCode != http.StatusOK {
if tc.isErr {
return
}
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}

var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body: %v", err)
}

got, ok := body["result"].(string)
if !ok {
t.Fatalf("unable to find result in response body")
}

if tc.wantRegex != "" {
matched, err := regexp.MatchString(tc.wantRegex, got)
if err != nil {
t.Fatalf("invalid regex pattern: %v", err)
}
if !matched {
t.Fatalf("result does not match expected pattern.\nGot: %s\nWant pattern: %s", got, tc.wantRegex)
}
}
})
}
}

func runFirestoreQuerySelectArrayTest(t *testing.T, collectionName string) {
|
||||
invokeTcs := []struct {
|
||||
name string
|
||||
api string
|
||||
requestBody io.Reader
|
||||
wantRegex string
|
||||
validateFields bool
|
||||
isErr bool
|
||||
}{
|
||||
{
|
||||
name: "query with array select fields - single field",
|
||||
api: "http://127.0.0.1:5000/api/tool/firestore-query-select-array/invoke",
|
||||
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
|
||||
"collection": "%s",
|
||||
"fields": ["name"]
|
||||
}`, collectionName))),
|
||||
wantRegex: `"name":"`,
|
||||
validateFields: true,
|
||||
isErr: false,
|
||||
},
|
||||
{
|
||||
name: "query with array select fields - multiple fields",
|
||||
api: "http://127.0.0.1:5000/api/tool/firestore-query-select-array/invoke",
|
||||
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
|
||||
"collection": "%s",
|
||||
"fields": ["name", "age"]
|
||||
}`, collectionName))),
|
||||
wantRegex: `"name":".*"age":`,
|
||||
validateFields: true,
|
||||
isErr: false,
|
||||
},
|
||||
{
|
||||
name: "query with empty array select fields",
|
||||
api: "http://127.0.0.1:5000/api/tool/firestore-query-select-array/invoke",
|
||||
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{
|
||||
"collection": "%s",
|
||||
"fields": []
|
||||
}`, collectionName))),
|
||||
wantRegex: `\[.*\]`, // Should return documents with all fields
|
||||
isErr: false,
|
||||
},
|
||||
{
|
||||
name: "missing fields parameter",
|
||||
api: "http://127.0.0.1:5000/api/tool/firestore-query-select-array/invoke",
|
||||
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"collection": "%s"}`, collectionName))),
|
||||
isErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
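	// The loop below mirrors the invoke loop above; when validateFields is set it
	// additionally parses the result as a JSON array and checks which document
	// fields survived the projection.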
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")

			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != http.StatusOK {
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body: %v", err)
			}

			got, ok := body["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}

			if tc.wantRegex != "" {
				matched, err := regexp.MatchString(tc.wantRegex, got)
				if err != nil {
					t.Fatalf("invalid regex pattern: %v", err)
				}
				if !matched {
					t.Fatalf("result does not match expected pattern.\nGot: %s\nWant pattern: %s", got, tc.wantRegex)
				}
			}

			// Additional validation for field selection
			if tc.validateFields {
				// Parse the result to check if only selected fields are present
				var results []map[string]interface{}
				err = json.Unmarshal([]byte(got), &results)
				if err != nil {
					t.Fatalf("error parsing result as JSON array: %v", err)
				}

				// For single field test, ensure only 'name' field is present in data
				if tc.name == "query with array select fields - single field" && len(results) > 0 {
					for _, result := range results {
						if data, ok := result["data"].(map[string]interface{}); ok {
							if _, hasName := data["name"]; !hasName {
								t.Fatalf("expected 'name' field in data, but not found")
							}
							// The 'age' field should not be present when only 'name' is selected
							if _, hasAge := data["age"]; hasAge {
								t.Fatalf("unexpected 'age' field in data when only 'name' was selected")
							}
						}
					}
				}

				// For multiple fields test, ensure both fields are present
				if tc.name == "query with array select fields - multiple fields" && len(results) > 0 {
					for _, result := range results {
						if data, ok := result["data"].(map[string]interface{}); ok {
							if _, hasName := data["name"]; !hasName {
								t.Fatalf("expected 'name' field in data, but not found")
							}
							if _, hasAge := data["age"]; !hasAge {
								t.Fatalf("expected 'age' field in data, but not found")
							}
						}
					}
				}
			}
		})
	}
}

func runFirestoreQueryCollectionTest(t *testing.T, collectionName string) {
	invokeTcs := []struct {
		name string
@@ -1684,7 +1385,7 @@ func runFirestoreQueryCollectionTest(t *testing.T, collectionName string) {
				"orderBy": "{\"field\": \"age\", \"direction\": \"DESCENDING\"}",
				"limit": 2
			}`, collectionName))),
			wantRegex: `"age":35.*"age":30`, // Should be ordered by age descending (Charlie=35, Alice=30)
			wantRegex: `"age":35.*"age":30`, // Should be ordered by age descending (Charlie=35, Alice=30, Bob=25)
			isErr:     false,
		},
		{

@@ -273,7 +273,6 @@ func RunToolInvokeTest(t *testing.T, select1Want string, options ...InvokeTestOp
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken

	// Test tool invoke endpoint
	invokeTcs := []struct {
@@ -842,7 +841,6 @@ func RunMCPToolCallMethod(t *testing.T, myFailToolWant, select1Want string, opti
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken

	idToken, err := GetGoogleIdToken(ClientId)
	if err != nil {

@@ -1,157 +0,0 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yugabytedb

import (
	"context"
	"fmt"
	"os"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/google/uuid"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"github.com/yugabyte/pgx/v5/pgxpool"
)

var (
	YBDB_SOURCE_KIND = "yugabytedb"
	YBDB_TOOL_KIND   = "yugabytedb-sql"
	YBDB_DATABASE    = os.Getenv("YUGABYTEDB_DATABASE")
	YBDB_HOST        = os.Getenv("YUGABYTEDB_HOST")
	YBDB_PORT        = os.Getenv("YUGABYTEDB_PORT")
	YBDB_USER        = os.Getenv("YUGABYTEDB_USER")
	YBDB_PASS        = os.Getenv("YUGABYTEDB_PASS")
	YBDB_LB          = os.Getenv("YUGABYTEDB_LOADBALANCE")
)

func getYBVars(t *testing.T) map[string]any {
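	// Note: `switch ""` compares the empty string against each case expression, so the
	// first unset environment variable selects its case; required variables fail the
	// test, while the load-balance flag falls back to a default.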
switch "" {
|
||||
case YBDB_DATABASE:
|
||||
t.Fatal("'YUGABYTEDB_DATABASE' not set")
|
||||
case YBDB_HOST:
|
||||
t.Fatal("'YUGABYTEDB_HOST' not set")
|
||||
case YBDB_PORT:
|
||||
t.Fatal("'YUGABYTEDB_PORT' not set")
|
||||
case YBDB_USER:
|
||||
t.Fatal("'YUGABYTEDB_USER' not set")
|
||||
case YBDB_PASS:
|
||||
t.Fatal("'YUGABYTEDB_PASS' not set")
|
||||
case YBDB_LB:
|
||||
fmt.Printf("YUGABYTEDB_LOADBALANCE value not set. Setting default value: false")
|
||||
YBDB_LB = "false"
|
||||
}
|
||||
|
||||
return map[string]any{
|
||||
"kind": YBDB_SOURCE_KIND,
|
||||
"host": YBDB_HOST,
|
||||
"port": YBDB_PORT,
|
||||
"database": YBDB_DATABASE,
|
||||
"user": YBDB_USER,
|
||||
"password": YBDB_PASS,
|
||||
"loadBalance": YBDB_LB,
|
||||
}
|
||||
}
|
||||
|
||||
func initYBConnectionPool(host, port, user, pass, dbname, loadBalance string) (*pgxpool.Pool, error) {
|
||||
dsn := fmt.Sprintf("postgres://%s:%s@%s:%s/%s?load_balance=%s", user, pass, host, port, dbname, loadBalance)
|
||||
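	// For illustration, assumed values (host=127.0.0.1, port=5433, user/pass/dbname=yugabyte,
	// loadBalance=false) yield: postgres://yugabyte:yugabyte@127.0.0.1:5433/yugabyte?load_balance=false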
	pool, err := pgxpool.New(context.Background(), dsn)
	if err != nil {
		return nil, fmt.Errorf("unable to create YugabyteDB connection pool: %w", err)
	}
	return pool, nil
}

// SetupYugabyteDBSQLTable creates a test table compatible with the yugabytedb-sql
// tool and inserts test data into it.
func SetupYugabyteDBSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, create_statement, insert_statement, tableName string, params []any) func(*testing.T) {
	err := pool.Ping(ctx)
	if err != nil {
		t.Fatalf("unable to connect to test database: %s", err)
	}

	// Create table
	_, err = pool.Query(ctx, create_statement)
	if err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
	}

	// Insert test data
	_, err = pool.Query(ctx, insert_statement, params...)
	if err != nil {
		t.Fatalf("unable to insert test data: %s", err)
	}

	return func(t *testing.T) {
		// tear down test
		_, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
		if err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	}
}

func TestYugabyteDB(t *testing.T) {
	sourceConfig := getYBVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	var args []string

	pool, err := initYBConnectionPool(YBDB_HOST, YBDB_PORT, YBDB_USER, YBDB_PASS, YBDB_DATABASE, YBDB_LB)
	if err != nil {
		t.Fatalf("unable to create YugabyteDB connection pool: %s", err)
	}

	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
	teardownTable1 := SetupYugabyteDBSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)

	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
	teardownTable2 := SetupYugabyteDBSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)

	toolsFile := tests.GetToolsConfig(sourceConfig, YBDB_TOOL_KIND, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
	tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
	toolsFile = tests.AddTemplateParamConfig(t, toolsFile, YBDB_TOOL_KIND, tmplSelectCombined, tmplSelectFilterCombined, "")

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

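	// Wait up to 10 seconds for the toolbox process to log "Server ready to serve"
	// before running the shared test suites against it.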
	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	select1Want, mcpMyFailToolWant, _, mcpSelect1Want := tests.GetPostgresWants()

	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, select1Want)
	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}